From c0b7ffd5d0c48320d9e2da1594093923cfbb60f2 Mon Sep 17 00:00:00 2001 From: longbingljw Date: Thu, 20 Nov 2025 09:44:39 +0800 Subject: [PATCH] feat:mysql adaptation for metadb (#28188) --- .github/workflows/api-tests.yml | 2 +- .github/workflows/db-migration-test.yml | 61 +- .gitignore | 2 + api/.env.example | 27 +- api/README.md | 4 +- api/configs/middleware/__init__.py | 32 +- api/controllers/console/app/statistic.py | 75 +- api/core/rag/retrieval/dataset_retrieval.py | 64 +- api/core/tools/tool_manager.py | 20 +- .../knowledge_retrieval_node.py | 101 +- api/libs/helper.py | 9 + ...ef91f18_rename_api_provider_description.py | 32 +- ...9b_update_appmodelconfig_and_add_table_.py | 35 +- .../053da0c1d756_add_api_tool_privacy.py | 41 +- ...84c228_remove_tool_id_from_model_invoke.py | 16 +- ...c1af8d_add_dataset_permission_tenant_id.py | 19 +- ...16fa53d9faec_add_provider_model_support.py | 113 +- ...ab037c40_add_keyworg_table_storage_type.py | 15 +- ...3fcf12ba_support_conversation_variables.py | 44 +- ...0956-0251a1c768cc_add_tidb_auth_binding.py | 45 +- ...bb251_add_parent_message_id_to_messages.py | 14 +- ...-6af6a521a53e_update_retrieval_resource.py | 74 +- ...434-33f5fac87f29_external_knowledge_api.py | 84 +- ...a11becb_add_name_and_size_to_tool_files.py | 32 +- ..._10_22_0959-43fa78bc3b7d_add_white_list.py | 30 +- ...c4f75af5e_add_tenant_plugin_permisisons.py | 32 +- ...ce70a7ca_update_upload_files_source_url.py | 48 +- ...pdate_type_of_custom_disclaimer_to_text.py | 105 +- ...9b_update_workflows_graph_features_and_.py | 128 +- ...22_0701-e19037032219_parent_child_index.py | 72 +- ...b07f66c737_remove_unused_tool_providers.py | 41 +- ...52d42eb6_add_auto_disabled_dataset_logs.py | 36 +- ...4_0617-f051706725cc_add_rate_limit_logs.py | 33 +- ...0917-d20049ed0af6_add_metadata_function.py | 112 +- ..._add_marked_name_and_marked_comment_in_.py | 19 +- ...e1f5dfb_add_workflowdraftvariable_model.py | 62 +- ...w_draft_varaibles_add_node_execution_id.py | 40 +- ...93fe_add_mcp_server_tool_and_app_server.py | 112 +- ...1c9ba48be8e4_add_uuidv7_function_in_sql.py | 23 +- ...2025_07_04_1705-71f5020c6470_tool_oauth.py | 84 +- ...d07_add_tenant_plugin_autoupgrade_table.py | 47 +- ...32b3f888abf_manual_dataset_field_update.py | 18 +- ...1e_add_provider_credential_pool_support.py | 108 +- ...5fa_add_provider_model_multi_credential.py | 126 +- ...47-8d289573e1da_add_oauth_provider_apps.py | 43 +- ...20211f18133_add_headers_to_mcp_provider.py | 11 +- ...dd_credential_status_for_provider_table.py | 13 +- ...68519ad5cd18_knowledge_pipeline_migrate.py | 450 +++-- ...662b25d9bc_remove_builtin_template_user.py | 17 +- ...11-03f8dcbc611e_add_workflowpause_model.py | 43 +- ..._30_1518-669ffd70119c_introduce_trigger.py | 385 ++-- ...1_15_2102-09cfdda155d1_mysql_adaptation.py | 131 ++ ...9d_add_message_files_into_agent_thought.py | 16 +- .../246ba09cbbdb_add_app_anntation_setting.py | 54 +- .../versions/2a3aebbbf4bb_add_app_tracing.py | 14 +- ...2e9819ca5b28_add_tenant_id_in_api_token.py | 38 +- ...a5a70d_add_tool_labels_to_agent_thought.py | 16 +- .../3b18fea55204_add_tool_label_bings.py | 29 +- ...3c7cac9521c6_add_tags_and_binding_table.py | 70 +- .../3ef9b2b6bee6_add_assistant_app.py | 134 +- ...5564d_conversation_columns_set_nullable.py | 74 +- .../versions/4823da1d26cf_add_tool_file.py | 40 +- ...fee_change_message_chain_id_to_nullable.py | 42 +- ...d64aa4_update_dataset_model_field_null_.py | 66 +- .../4e99a8df00ff_add_load_balancing.py | 87 +- ...022897aaceb_add_model_name_in_embedding.py | 22 +- 
.../versions/53bf8af60645_update_model.py | 48 +- ...nable_tool_file_without_conversation_id.py | 38 +- .../614f77cecc48_add_last_active_at.py | 14 +- api/migrations/versions/64b051264f32_init.py | 1637 +++++++++++------ ...43972bdc_add_dataset_retriever_resource.py | 74 +- ...fb077b04_add_dataset_collection_binding.py | 43 +- ...39_add_anntation_history_match_response.py | 19 +- ...3755c_add_app_config_retriever_resource.py | 16 +- .../7b45942e39bb_add_api_key_auth_binding.py | 65 +- .../7bdef072e63a_add_workflow_tool.py | 55 +- .../7ce5a52e4eee_add_tool_providers.py | 53 +- ...a8693e07a_add_table_dataset_permissions.py | 33 +- ...8072f0caa04_add_custom_config_in_tenant.py | 16 +- api/migrations/versions/89c7899ca936_.py | 44 +- .../8d2d099ceb74_add_qa_model_support.py | 29 +- ...e_add_environment_variable_to_workflow_.py | 14 +- ...6f3c800_rename_api_provider_credentails.py | 16 +- .../8fe468ba0ca5_add_gpt4v_supports.py | 63 +- .../968fff4c0ab9_add_api_based_extension.py | 38 +- .../9f4e3427ea84_add_created_by_role.py | 35 +- ...4dfde53b_add_language_to_recommend_apps.py | 20 +- ...56fb053ef_app_config_add_speech_to_text.py | 16 +- ...d7385a7b66_add_embeddings_provider_name.py | 20 +- ...e_add_external_data_tools_in_app_model_.py | 16 +- api/migrations/versions/b24be59fbb04_.py | 16 +- .../versions/b289e2408ee2_add_workflow.py | 254 ++- ...09c049e8e_add_advanced_prompt_templates.py | 25 +- .../bf0aec5ba2cf_add_provider_order.py | 67 +- ...9_remove_app_model_config_trace_config_.py | 38 +- .../versions/c3311b089690_add_tool_meta.py | 16 +- .../c71211c8f604_add_tool_invoke_model_log.py | 76 +- ...998d4d_set_model_config_column_nullable.py | 72 +- .../e1901f623fd0_add_annotation_reply.py | 117 +- .../e2eacc9a1b63_add_status_for_message.py | 25 +- ...08af0a69ccefbb59fa80c778efee300bb780980.py | 56 +- .../e8883b0148c9_add_dataset_model_name.py | 19 +- ...e349e6ac_increase_max_model_name_length.py | 48 +- .../f25003750af4_add_created_updated_at.py | 19 +- ...85e260_add_anntation_history_message_id.py | 19 +- .../f9107f83abab_add_desc_for_apps.py | 16 +- ...fca025d3b60f_add_dataset_retrival_model.py | 56 +- ...7_remove_extra_tracing_app_config_table.py | 52 +- api/models/account.py | 41 +- api/models/api_based_extension.py | 9 +- api/models/base.py | 7 +- api/models/dataset.py | 167 +- api/models/model.py | 206 +-- api/models/oauth.py | 21 +- api/models/provider.py | 39 +- api/models/source.py | 14 +- api/models/task.py | 12 +- api/models/tools.py | 95 +- api/models/trigger.py | 38 +- api/models/types.py | 88 +- api/models/web.py | 11 +- api/models/workflow.py | 62 +- .../sqlalchemy_api_workflow_run_repository.py | 18 +- .../tools/workflow_tools_manage_service.py | 2 - .../workflow_draft_variable_service.py | 66 +- .../tools/test_tools_transform_service.py | 3 - docker/.env.example | 88 +- docker/docker-compose-template.yaml | 180 +- docker/docker-compose.middleware.yaml | 44 +- docker/docker-compose.yaml | 210 ++- docker/middleware.env.example | 48 +- 131 files changed, 6312 insertions(+), 2602 deletions(-) create mode 100644 api/migrations/versions/2025_11_15_2102-09cfdda155d1_mysql_adaptation.py diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 37d351627b..557d747a8c 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -62,7 +62,7 @@ jobs: compose-file: | docker/docker-compose.middleware.yaml services: | - db + db_postgres redis sandbox ssrf_proxy diff --git a/.github/workflows/db-migration-test.yml 
b/.github/workflows/db-migration-test.yml index b9961a4714..101d973466 100644 --- a/.github/workflows/db-migration-test.yml +++ b/.github/workflows/db-migration-test.yml @@ -8,7 +8,7 @@ concurrency: cancel-in-progress: true jobs: - db-migration-test: + db-migration-test-postgres: runs-on: ubuntu-latest steps: @@ -45,7 +45,7 @@ jobs: compose-file: | docker/docker-compose.middleware.yaml services: | - db + db_postgres redis - name: Prepare configs @@ -57,3 +57,60 @@ jobs: env: DEBUG: true run: uv run --directory api flask upgrade-db + + db-migration-test-mysql: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + + - name: Setup UV and Python + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + python-version: "3.12" + cache-dependency-glob: api/uv.lock + + - name: Install dependencies + run: uv sync --project api + - name: Ensure offline migrations are supported + run: | + # upgrade + uv run --directory api flask db upgrade 'base:head' --sql + # downgrade + uv run --directory api flask db downgrade 'head:base' --sql + + - name: Prepare middleware env for MySQL + run: | + cd docker + cp middleware.env.example middleware.env + sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env + sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env + sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env + sed -i 's/DB_USERNAME=postgres/DB_USERNAME=mysql/' middleware.env + + - name: Set up Middlewares + uses: hoverkraft-tech/compose-action@v2.0.2 + with: + compose-file: | + docker/docker-compose.middleware.yaml + services: | + db_mysql + redis + + - name: Prepare configs for MySQL + run: | + cd api + cp .env.example .env + sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' .env + sed -i 's/DB_PORT=5432/DB_PORT=3306/' .env + sed -i 's/DB_USERNAME=postgres/DB_USERNAME=root/' .env + + - name: Run DB Migration + env: + DEBUG: true + run: uv run --directory api flask upgrade-db diff --git a/.gitignore index c6067e96cd..79ba44b207 100644 --- a/.gitignore +++ b/.gitignore @@ -186,6 +186,8 @@ docker/volumes/couchbase/* docker/volumes/oceanbase/* docker/volumes/plugin_daemon/* docker/volumes/matrixone/* +docker/volumes/mysql/* +docker/volumes/seekdb/* !docker/volumes/oceanbase/init.d docker/nginx/conf.d/default.conf diff --git a/api/.env.example b/api/.env.example index 5713095374..ba512a668d 100644 --- a/api/.env.example +++ b/api/.env.example @@ -72,12 +72,15 @@ REDIS_CLUSTERS_PASSWORD= # celery configuration CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1 CELERY_BACKEND=redis -# PostgreSQL database configuration + +# Database configuration +DB_TYPE=postgresql DB_USERNAME=postgres DB_PASSWORD=difyai123456 DB_HOST=localhost DB_PORT=5432 DB_DATABASE=dify + SQLALCHEMY_POOL_PRE_PING=true SQLALCHEMY_POOL_TIMEOUT=30 @@ -163,7 +166,7 @@ CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,* COOKIE_DOMAIN= # Vector database configuration -# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
+# Supported values are `weaviate`, `oceanbase`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`. VECTOR_STORE=weaviate # Prefix used to create collection name in vector database VECTOR_INDEX_NAME_PREFIX=Vector_index @@ -174,6 +177,17 @@ WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih WEAVIATE_GRPC_ENABLED=false WEAVIATE_BATCH_SIZE=100 +# OceanBase Vector configuration +OCEANBASE_VECTOR_HOST=127.0.0.1 +OCEANBASE_VECTOR_PORT=2881 +OCEANBASE_VECTOR_USER=root@test +OCEANBASE_VECTOR_PASSWORD=difyai123456 +OCEANBASE_VECTOR_DATABASE=test +OCEANBASE_MEMORY_LIMIT=6G +OCEANBASE_ENABLE_HYBRID_SEARCH=false +OCEANBASE_FULLTEXT_PARSER=ik +SEEKDB_MEMORY_LIMIT=2G + # Qdrant configuration, use `http://localhost:6333` for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode QDRANT_URL=http://localhost:6333 QDRANT_API_KEY=difyai123456 @@ -339,15 +353,6 @@ LINDORM_PASSWORD=admin LINDORM_USING_UGC=True LINDORM_QUERY_TIMEOUT=1 -# OceanBase Vector configuration -OCEANBASE_VECTOR_HOST=127.0.0.1 -OCEANBASE_VECTOR_PORT=2881 -OCEANBASE_VECTOR_USER=root@test -OCEANBASE_VECTOR_PASSWORD=difyai123456 -OCEANBASE_VECTOR_DATABASE=test -OCEANBASE_MEMORY_LIMIT=6G -OCEANBASE_ENABLE_HYBRID_SEARCH=false - # AlibabaCloud MySQL Vector configuration ALIBABACLOUD_MYSQL_HOST=127.0.0.1 ALIBABACLOUD_MYSQL_PORT=3306 diff --git a/api/README.md b/api/README.md index 7809ea8a3d..2267183031 100644 --- a/api/README.md +++ b/api/README.md @@ -15,8 +15,8 @@ ```bash cd ../docker cp middleware.env.example middleware.env - # change the profile to other vector database if you are not using weaviate - docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d + # change the profile to mysql if you are not using postgres, and to another vector database if you are not using weaviate + docker compose -f docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d cd ../api ``` diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 816d0e442f..a5e35c99ca 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -105,6 +105,12 @@ class KeywordStoreConfig(BaseSettings): class DatabaseConfig(BaseSettings): + # Database type selector + DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field( + description="Database type to use. 
OceanBase is MySQL-compatible.", + default="postgresql", + ) + DB_HOST: str = Field( description="Hostname or IP address of the database server.", default="localhost", @@ -140,10 +146,10 @@ class DatabaseConfig(BaseSettings): default="", ) - SQLALCHEMY_DATABASE_URI_SCHEME: str = Field( - description="Database URI scheme for SQLAlchemy connection.", - default="postgresql", - ) + @computed_field # type: ignore[prop-decorator] + @property + def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str: + return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql" @computed_field # type: ignore[prop-decorator] @property @@ -204,15 +210,15 @@ class DatabaseConfig(BaseSettings): # Parse DB_EXTRAS for 'options' db_extras_dict = dict(parse_qsl(self.DB_EXTRAS)) options = db_extras_dict.get("options", "") - # Always include timezone - timezone_opt = "-c timezone=UTC" - if options: - # Merge user options and timezone - merged_options = f"{options} {timezone_opt}" - else: - merged_options = timezone_opt - - connect_args = {"options": merged_options} + connect_args = {} + # Use the dynamic SQLALCHEMY_DATABASE_URI_SCHEME property + if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"): + timezone_opt = "-c timezone=UTC" + if options: + merged_options = f"{options} {timezone_opt}" + else: + merged_options = timezone_opt + connect_args = {"options": merged_options} return { "pool_size": self.SQLALCHEMY_POOL_SIZE, diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index 37ed3d9e27..b4bd05e891 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -10,9 +10,9 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.datetime_utils import parse_time_range -from libs.helper import DatetimeString +from libs.helper import DatetimeString, convert_datetime_to_date from libs.login import current_account_with_tenant, login_required -from models import AppMode, Message +from models import AppMode @console_ns.route("/apps//statistics/daily-messages") @@ -44,8 +44,9 @@ class DailyMessageStatistic(Resource): ) args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, COUNT(*) AS message_count FROM messages @@ -106,6 +107,17 @@ class DailyConversationStatistic(Resource): account, _ = current_account_with_tenant() args = parser.parse_args() + + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, + COUNT(DISTINCT conversation_id) AS conversation_count +FROM + messages +WHERE + app_id = :app_id + AND invoke_from != :invoke_from""" + arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} assert account.timezone is not None try: @@ -113,30 +125,21 @@ class DailyConversationStatistic(Resource): except ValueError as e: abort(400, description=str(e)) - stmt = ( - sa.select( - sa.func.date( - sa.func.date_trunc("day", sa.text("created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz")) - ).label("date"), - sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"), - ) - .select_from(Message) - .where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER) - 
) - if start_datetime_utc: - stmt = stmt.where(Message.created_at >= start_datetime_utc) + sql_query += " AND created_at >= :start" + arg_dict["start"] = start_datetime_utc if end_datetime_utc: - stmt = stmt.where(Message.created_at < end_datetime_utc) + sql_query += " AND created_at < :end" + arg_dict["end"] = end_datetime_utc - stmt = stmt.group_by("date").order_by("date") + sql_query += " GROUP BY date ORDER BY date" response_data = [] with db.engine.begin() as conn: - rs = conn.execute(stmt, {"tz": account.timezone}) - for row in rs: - response_data.append({"date": str(row.date), "conversation_count": row.conversation_count}) + rs = conn.execute(sa.text(sql_query), arg_dict) + for i in rs: + response_data.append({"date": str(i.date), "conversation_count": i.conversation_count}) return jsonify({"data": response_data}) @@ -161,8 +164,9 @@ class DailyTerminalsStatistic(Resource): args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, COUNT(DISTINCT messages.from_end_user_id) AS terminal_count FROM messages @@ -217,8 +221,9 @@ class DailyTokenCostStatistic(Resource): args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, (SUM(messages.message_tokens) + SUM(messages.answer_tokens)) AS token_count, SUM(total_price) AS total_price FROM @@ -276,8 +281,9 @@ class AverageSessionInteractionStatistic(Resource): args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("c.created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, AVG(subquery.message_count) AS interactions FROM ( @@ -351,8 +357,9 @@ class UserSatisfactionRateStatistic(Resource): args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("m.created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, COUNT(m.id) AS message_count, COUNT(mf.id) AS feedback_count FROM @@ -416,8 +423,9 @@ class AverageResponseTimeStatistic(Resource): args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, AVG(provider_response_latency) AS latency FROM messages @@ -471,8 +479,9 @@ class TokensPerSecondStatistic(Resource): account, _ = current_account_with_tenant() args = parser.parse_args() - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, CASE WHEN SUM(provider_response_latency) = 0 THEN 0 ELSE (SUM(answer_tokens) / SUM(provider_response_latency)) diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 45b19f25a0..3db67efb0e 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -7,8 +7,7 @@ from 
collections.abc import Generator, Mapping from typing import Any, Union, cast from flask import Flask, current_app -from sqlalchemy import Float, and_, or_, select, text -from sqlalchemy import cast as sqlalchemy_cast +from sqlalchemy import and_, or_, select from core.app.app_config.entities import ( DatasetEntity, @@ -1023,60 +1022,55 @@ class DatasetRetrieval: self, sequence: int, condition: str, metadata_name: str, value: Any | None, filters: list ): if value is None and condition not in ("empty", "not empty"): - return + return filters + + json_field = DatasetDocument.doc_metadata[metadata_name].as_string() - key = f"{metadata_name}_{sequence}" - key_value = f"{metadata_name}_{sequence}_value" match condition: case "contains": - filters.append( - (text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"%{value}%"} - ) - ) + filters.append(json_field.like(f"%{value}%")) + case "not contains": - filters.append( - (text(f"documents.doc_metadata ->> :{key} NOT LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"%{value}%"} - ) - ) + filters.append(json_field.notlike(f"%{value}%")) + case "start with": - filters.append( - (text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"{value}%"} - ) - ) + filters.append(json_field.like(f"{value}%")) case "end with": - filters.append( - (text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"%{value}"} - ) - ) + filters.append(json_field.like(f"%{value}")) + case "is" | "=": if isinstance(value, str): - filters.append(DatasetDocument.doc_metadata[metadata_name] == f'"{value}"') - else: - filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) == value) + filters.append(json_field == value) + elif isinstance(value, (int, float)): + filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() == value) + case "is not" | "≠": if isinstance(value, str): - filters.append(DatasetDocument.doc_metadata[metadata_name] != f'"{value}"') - else: - filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) != value) + filters.append(json_field != value) + elif isinstance(value, (int, float)): + filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() != value) + case "empty": filters.append(DatasetDocument.doc_metadata[metadata_name].is_(None)) + case "not empty": filters.append(DatasetDocument.doc_metadata[metadata_name].isnot(None)) + case "before" | "<": - filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) < value) + filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() < value) + case "after" | ">": - filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) > value) + filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() > value) + case "≤" | "<=": - filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) <= value) + filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() <= value) + case "≥" | ">=": - filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) >= value) + filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() >= value) case _: pass + return filters def _fetch_model_config( diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index daf3772d30..0beb42479b 100644 --- 
a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -13,6 +13,7 @@ from sqlalchemy.orm import Session from yarl import URL import contexts +from configs import dify_config from core.helper.provider_cache import ToolProviderCredentialsCache from core.plugin.impl.tool import PluginToolManager from core.tools.__base.tool_provider import ToolProviderController @@ -32,7 +33,6 @@ from services.tools.mcp_tools_manage_service import MCPToolManageService if TYPE_CHECKING: from core.workflow.nodes.tool.entities import ToolEntity -from configs import dify_config from core.agent.entities import AgentToolEntity from core.app.entities.app_invoke_entities import InvokeFrom from core.helper.module_import_helper import load_single_subclass_from_source @@ -618,12 +618,28 @@ class ToolManager: """ # according to multi credentials, select the one with is_default=True first, then created_at oldest # for compatibility with old version - sql = """ + if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql": + # PostgreSQL: Use DISTINCT ON + sql = """ SELECT DISTINCT ON (tenant_id, provider) id FROM tool_builtin_providers WHERE tenant_id = :tenant_id ORDER BY tenant_id, provider, is_default DESC, created_at DESC """ + else: + # MySQL: Use window function to achieve same result + sql = """ + SELECT id FROM ( + SELECT id, + ROW_NUMBER() OVER ( + PARTITION BY tenant_id, provider + ORDER BY is_default DESC, created_at DESC + ) as rn + FROM tool_builtin_providers + WHERE tenant_id = :tenant_id + ) ranked WHERE rn = 1 + """ + with Session(db.engine, autoflush=False) as session: ids = [row.id for row in session.execute(sa.text(sql), {"tenant_id": tenant_id}).all()] return session.query(BuiltinToolProvider).where(BuiltinToolProvider.id.in_(ids)).all() diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 4a63900527..e8ee44d5a9 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -6,8 +6,7 @@ from collections import defaultdict from collections.abc import Mapping, Sequence from typing import TYPE_CHECKING, Any, cast -from sqlalchemy import Float, and_, func, or_, select, text -from sqlalchemy import cast as sqlalchemy_cast +from sqlalchemy import and_, func, literal, or_, select from sqlalchemy.orm import sessionmaker from core.app.app_config.entities import DatasetRetrieveConfigEntity @@ -597,79 +596,79 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node): if value is None and condition not in ("empty", "not empty"): return filters - key = f"{metadata_name}_{sequence}" - key_value = f"{metadata_name}_{sequence}_value" + json_field = Document.doc_metadata[metadata_name].as_string() + match condition: case "contains": - filters.append( - (text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"%{value}%"} - ) - ) + filters.append(json_field.like(f"%{value}%")) + case "not contains": - filters.append( - (text(f"documents.doc_metadata ->> :{key} NOT LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"%{value}%"} - ) - ) + filters.append(json_field.notlike(f"%{value}%")) + case "start with": - filters.append( - (text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"{value}%"} - ) - ) + filters.append(json_field.like(f"{value}%")) + case "end with": - 
filters.append( - (text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params( - **{key: metadata_name, key_value: f"%{value}"} - ) - ) + filters.append(json_field.like(f"%{value}")) case "in": if isinstance(value, str): - escaped_values = [v.strip().replace("'", "''") for v in str(value).split(",")] - escaped_value_str = ",".join(escaped_values) + value_list = [v.strip() for v in value.split(",") if v.strip()] + elif isinstance(value, (list, tuple)): + value_list = [str(v) for v in value if v is not None] else: - escaped_value_str = str(value) - filters.append( - (text(f"documents.doc_metadata ->> :{key} = any(string_to_array(:{key_value},','))")).params( - **{key: metadata_name, key_value: escaped_value_str} - ) - ) + value_list = [str(value)] if value is not None else [] + + if not value_list: + filters.append(literal(False)) + else: + filters.append(json_field.in_(value_list)) + case "not in": if isinstance(value, str): - escaped_values = [v.strip().replace("'", "''") for v in str(value).split(",")] - escaped_value_str = ",".join(escaped_values) + value_list = [v.strip() for v in value.split(",") if v.strip()] + elif isinstance(value, (list, tuple)): + value_list = [str(v) for v in value if v is not None] else: - escaped_value_str = str(value) - filters.append( - (text(f"documents.doc_metadata ->> :{key} != all(string_to_array(:{key_value},','))")).params( - **{key: metadata_name, key_value: escaped_value_str} - ) - ) - case "=" | "is": + value_list = [str(value)] if value is not None else [] + + if not value_list: + filters.append(literal(True)) + else: + filters.append(json_field.notin_(value_list)) + + case "is" | "=": if isinstance(value, str): - filters.append(Document.doc_metadata[metadata_name] == f'"{value}"') - else: - filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) == value) + filters.append(json_field == value) + elif isinstance(value, (int, float)): + filters.append(Document.doc_metadata[metadata_name].as_float() == value) + case "is not" | "≠": if isinstance(value, str): - filters.append(Document.doc_metadata[metadata_name] != f'"{value}"') - else: - filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) != value) + filters.append(json_field != value) + elif isinstance(value, (int, float)): + filters.append(Document.doc_metadata[metadata_name].as_float() != value) + case "empty": filters.append(Document.doc_metadata[metadata_name].is_(None)) + case "not empty": filters.append(Document.doc_metadata[metadata_name].isnot(None)) + case "before" | "<": - filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) < value) + filters.append(Document.doc_metadata[metadata_name].as_float() < value) + case "after" | ">": - filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) > value) + filters.append(Document.doc_metadata[metadata_name].as_float() > value) + case "≤" | "<=": - filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) <= value) + filters.append(Document.doc_metadata[metadata_name].as_float() <= value) + case "≥" | ">=": - filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) >= value) + filters.append(Document.doc_metadata[metadata_name].as_float() >= value) + case _: pass + return filters @classmethod diff --git a/api/libs/helper.py b/api/libs/helper.py index 60484dd40b..1013c3b878 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -177,6 +177,15 @@ def timezone(timezone_string): 
raise ValueError(error) +def convert_datetime_to_date(field, target_timezone: str = ":tz"): + if dify_config.DB_TYPE == "postgresql": + return f"DATE(DATE_TRUNC('day', {field} AT TIME ZONE 'UTC' AT TIME ZONE {target_timezone}))" + elif dify_config.DB_TYPE in ("mysql", "oceanbase"): + return f"DATE(CONVERT_TZ({field}, 'UTC', {target_timezone}))" + else: + raise NotImplementedError(f"Unsupported database type: {dify_config.DB_TYPE}") + + def generate_string(n): letters_digits = string.ascii_letters + string.digits result = "" diff --git a/api/migrations/versions/00bacef91f18_rename_api_provider_description.py b/api/migrations/versions/00bacef91f18_rename_api_provider_description.py index 5ae9e8769a..17ed067d81 100644 --- a/api/migrations/versions/00bacef91f18_rename_api_provider_description.py +++ b/api/migrations/versions/00bacef91f18_rename_api_provider_description.py @@ -8,6 +8,12 @@ Create Date: 2024-01-07 04:07:34.482983 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '00bacef91f18' down_revision = '8ec536f3c800' @@ -17,17 +23,31 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('description', sa.Text(), nullable=False)) - batch_op.drop_column('description_str') + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('description', sa.Text(), nullable=False)) + batch_op.drop_column('description_str') + else: + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False)) + batch_op.drop_column('description_str') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False)) - batch_op.drop_column('description') + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False)) + batch_op.drop_column('description') + else: + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False)) + batch_op.drop_column('description') # ### end Alembic commands ### diff --git a/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py b/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py index 153861a71a..f64e16db7f 100644 --- a/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py +++ b/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py @@ -10,6 +10,10 @@ from alembic import op import models.types + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '04c602f5dc9b' down_revision = '4ff534e1eb11' @@ -19,15 +23,28 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust!
### - op.create_table('tracing_app_configs', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('tracing_provider', sa.String(length=255), nullable=True), - sa.Column('tracing_config', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tracing_app_configs', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('tracing_provider', sa.String(length=255), nullable=True), + sa.Column('tracing_config', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') + ) + else: + op.create_table('tracing_app_configs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('tracing_provider', sa.String(length=255), nullable=True), + sa.Column('tracing_config', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/053da0c1d756_add_api_tool_privacy.py b/api/migrations/versions/053da0c1d756_add_api_tool_privacy.py index a589f1f08b..2f54763f00 100644 --- a/api/migrations/versions/053da0c1d756_add_api_tool_privacy.py +++ b/api/migrations/versions/053da0c1d756_add_api_tool_privacy.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '053da0c1d756' down_revision = '4829e54d2fee' @@ -18,16 +24,31 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('tool_conversation_variables', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('user_id', postgresql.UUID(), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('conversation_id', postgresql.UUID(), nullable=False), - sa.Column('variables_str', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tool_conversation_variables', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('user_id', postgresql.UUID(), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('conversation_id', postgresql.UUID(), nullable=False), + sa.Column('variables_str', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey') + ) + else: + op.create_table('tool_conversation_variables', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('variables_str', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey') + ) + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), nullable=True)) batch_op.alter_column('icon', diff --git a/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py b/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py index 58863fe3a7..ed70bf5d08 100644 --- a/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py +++ b/api/migrations/versions/114eed84c228_remove_tool_id_from_model_invoke.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '114eed84c228' down_revision = 'c71211c8f604' @@ -26,7 +32,13 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op: - batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False)) + else: + with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py b/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py index 8907f78117..509bd5d0e8 100644 --- a/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py +++ b/api/migrations/versions/161cadc1af8d_add_dataset_permission_tenant_id.py @@ -8,7 +8,11 @@ Create Date: 2024-07-05 14:30:59.472593 import sqlalchemy as sa from alembic import op -import models as models +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" # revision identifiers, used by Alembic. revision = '161cadc1af8d' @@ -19,9 +23,16 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: - # Step 1: Add column without NOT NULL constraint - op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: + # Step 1: Add column without NOT NULL constraint + op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False)) + else: + with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: + # Step 1: Add column without NOT NULL constraint + op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/16fa53d9faec_add_provider_model_support.py b/api/migrations/versions/16fa53d9faec_add_provider_model_support.py index 6791cf4578..ce24a20172 100644 --- a/api/migrations/versions/16fa53d9faec_add_provider_model_support.py +++ b/api/migrations/versions/16fa53d9faec_add_provider_model_support.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '16fa53d9faec' down_revision = '8d2d099ceb74' @@ -18,44 +24,87 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('provider_models', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('provider_name', sa.String(length=40), nullable=False), - sa.Column('model_name', sa.String(length=40), nullable=False), - sa.Column('model_type', sa.String(length=40), nullable=False), - sa.Column('encrypted_config', sa.Text(), nullable=True), - sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='provider_model_pkey'), - sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('provider_models', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('model_name', sa.String(length=40), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('encrypted_config', sa.Text(), nullable=True), + sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name') + ) + else: + op.create_table('provider_models', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('model_name', sa.String(length=40), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('encrypted_config', models.types.LongText(), nullable=True), + sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name') + ) + with op.batch_alter_table('provider_models', schema=None) as batch_op: batch_op.create_index('provider_model_tenant_id_provider_idx', ['tenant_id', 'provider_name'], unique=False) - op.create_table('tenant_default_models', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('provider_name', sa.String(length=40), nullable=False), - sa.Column('model_name', sa.String(length=40), nullable=False), - sa.Column('model_type', sa.String(length=40), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', 
sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey') - ) + if _is_pg(conn): + op.create_table('tenant_default_models', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('model_name', sa.String(length=40), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey') + ) + else: + op.create_table('tenant_default_models', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('model_name', sa.String(length=40), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey') + ) + with op.batch_alter_table('tenant_default_models', schema=None) as batch_op: batch_op.create_index('tenant_default_model_tenant_id_provider_type_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False) - op.create_table('tenant_preferred_model_providers', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('provider_name', sa.String(length=40), nullable=False), - sa.Column('preferred_provider_type', sa.String(length=40), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey') - ) + if _is_pg(conn): + op.create_table('tenant_preferred_model_providers', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('preferred_provider_type', sa.String(length=40), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey') + ) + else: + op.create_table('tenant_preferred_model_providers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('preferred_provider_type', sa.String(length=40), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), 
server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey') + ) + with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op: batch_op.create_index('tenant_preferred_model_provider_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False) diff --git a/api/migrations/versions/17b5ab037c40_add_keyworg_table_storage_type.py b/api/migrations/versions/17b5ab037c40_add_keyworg_table_storage_type.py index 7707148489..4ce073318a 100644 --- a/api/migrations/versions/17b5ab037c40_add_keyworg_table_storage_type.py +++ b/api/migrations/versions/17b5ab037c40_add_keyworg_table_storage_type.py @@ -8,6 +8,10 @@ Create Date: 2024-04-01 09:48:54.232201 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '17b5ab037c40' down_revision = 'a8f9b3c45e4a' @@ -17,9 +21,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - - with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op: - batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op: + batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False)) + else: + with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op: + batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_08_13_0633-63a83fcf12ba_support_conversation_variables.py b/api/migrations/versions/2024_08_13_0633-63a83fcf12ba_support_conversation_variables.py index 16e1efd4ef..e8d725e78c 100644 --- a/api/migrations/versions/2024_08_13_0633-63a83fcf12ba_support_conversation_variables.py +++ b/api/migrations/versions/2024_08_13_0633-63a83fcf12ba_support_conversation_variables.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '63a83fcf12ba' down_revision = '1787fbae959a' @@ -19,21 +23,39 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('workflow__conversation_variables', - sa.Column('id', models.types.StringUUID(), nullable=False), - sa.Column('conversation_id', models.types.StringUUID(), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('data', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey')) - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('workflow__conversation_variables', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('data', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey')) + ) + else: + op.create_table('workflow__conversation_variables', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('data', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey')) + ) + with op.batch_alter_table('workflow__conversation_variables', schema=None) as batch_op: batch_op.create_index(batch_op.f('workflow__conversation_variables_app_id_idx'), ['app_id'], unique=False) batch_op.create_index(batch_op.f('workflow__conversation_variables_created_at_idx'), ['created_at'], unique=False) - with op.batch_alter_table('workflows', schema=None) as batch_op: - batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False)) + if _is_pg(conn): + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False)) + else: + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('conversation_variables', models.types.LongText(), default='{}', nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py b/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py index ca2e410442..1e6743fba8 100644 --- a/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py +++ b/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '0251a1c768cc' down_revision = 'bbadea11becb' @@ -19,18 +23,35 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('tidb_auth_bindings', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=True), - sa.Column('cluster_id', sa.String(length=255), nullable=False), - sa.Column('cluster_name', sa.String(length=255), nullable=False), - sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False), - sa.Column('account', sa.String(length=255), nullable=False), - sa.Column('password', sa.String(length=255), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tidb_auth_bindings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('cluster_id', sa.String(length=255), nullable=False), + sa.Column('cluster_name', sa.String(length=255), nullable=False), + sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False), + sa.Column('account', sa.String(length=255), nullable=False), + sa.Column('password', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey') + ) + else: + op.create_table('tidb_auth_bindings', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('cluster_id', sa.String(length=255), nullable=False), + sa.Column('cluster_name', sa.String(length=255), nullable=False), + sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'"), nullable=False), + sa.Column('account', sa.String(length=255), nullable=False), + sa.Column('password', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey') + ) + with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op: batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False) batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False) diff --git a/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py b/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py index fd957eeafb..2c8bb2de89 100644 --- a/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py +++ b/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. 
+        op.create_table('tidb_auth_bindings',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
+        sa.Column('cluster_id', sa.String(length=255), nullable=False),
+        sa.Column('cluster_name', sa.String(length=255), nullable=False),
+        sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+        sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'"), nullable=False),
+        sa.Column('account', sa.String(length=255), nullable=False),
+        sa.Column('password', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
+        )
+
     with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op:
         batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False)
         batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False)

diff --git a/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py b/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py
index fd957eeafb..2c8bb2de89 100644
--- a/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py
+++ b/api/migrations/versions/2024_09_11_1012-d57ba9ebb251_add_parent_message_id_to_messages.py
@@ -10,6 +10,10 @@ from alembic import op

 import models as models

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = 'd57ba9ebb251'
 down_revision = '675b5321501b'
@@ -22,8 +26,14 @@ def upgrade():
     with op.batch_alter_table('messages', schema=None) as batch_op:
         batch_op.add_column(sa.Column('parent_message_id', models.types.StringUUID(), nullable=True))

-    # Set parent_message_id for existing messages to uuid_nil() to distinguish them from new messages with actual parent IDs or NULLs
-    op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')
+    # Set parent_message_id for existing messages to distinguish them from new messages with actual parent IDs or NULLs
+    conn = op.get_bind()
+    if _is_pg(conn):
+        # PostgreSQL: Use uuid_nil() function
+        op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')
+    else:
+        # MySQL: Use a specific UUID value to represent nil
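+        # ('00000000-0000-0000-0000-000000000000' is the nil UUID, i.e. the
+        # same value PostgreSQL's uuid_nil() returns)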
+        op.execute("UPDATE messages SET parent_message_id = '00000000-0000-0000-0000-000000000000' WHERE parent_message_id IS NULL")

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py b/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
index 5337b340db..0767b725f6 100644
--- a/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
+++ b/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
@@ -6,7 +6,11 @@ Create Date: 2024-09-24 09:22:43.570120

 """
 from alembic import op
-import models as models
+import models.types
+
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

@@ -19,30 +23,58 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
-        batch_op.alter_column('document_id',
-               existing_type=sa.UUID(),
-               nullable=True)
-        batch_op.alter_column('data_source_type',
-               existing_type=sa.TEXT(),
-               nullable=True)
-        batch_op.alter_column('segment_id',
-               existing_type=sa.UUID(),
-               nullable=True)
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+            batch_op.alter_column('document_id',
+                   existing_type=sa.UUID(),
+                   nullable=True)
+            batch_op.alter_column('data_source_type',
+                   existing_type=sa.TEXT(),
+                   nullable=True)
+            batch_op.alter_column('segment_id',
+                   existing_type=sa.UUID(),
+                   nullable=True)
+    else:
+        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+            batch_op.alter_column('document_id',
+                   existing_type=models.types.StringUUID(),
+                   nullable=True)
+            batch_op.alter_column('data_source_type',
+                   existing_type=models.types.LongText(),
+                   nullable=True)
+            batch_op.alter_column('segment_id',
+                   existing_type=models.types.StringUUID(),
+                   nullable=True)

     # ### end Alembic commands ###


 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
-        batch_op.alter_column('segment_id',
-               existing_type=sa.UUID(),
-               nullable=False)
-        batch_op.alter_column('data_source_type',
-               existing_type=sa.TEXT(),
-               nullable=False)
-        batch_op.alter_column('document_id',
-               existing_type=sa.UUID(),
-               nullable=False)
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+            batch_op.alter_column('segment_id',
+                   existing_type=sa.UUID(),
+                   nullable=False)
+            batch_op.alter_column('data_source_type',
+                   existing_type=sa.TEXT(),
+                   nullable=False)
+            batch_op.alter_column('document_id',
+                   existing_type=sa.UUID(),
+                   nullable=False)
+    else:
+        with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+            batch_op.alter_column('segment_id',
+                   existing_type=models.types.StringUUID(),
+                   nullable=False)
+            batch_op.alter_column('data_source_type',
+                   existing_type=models.types.LongText(),
+                   nullable=False)
+            batch_op.alter_column('document_id',
+                   existing_type=models.types.StringUUID(),
+                   nullable=False)

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py b/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py
index 3cb76e72c1..ac81d13c61 100644
--- a/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py
+++ b/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '33f5fac87f29'
 down_revision = '6af6a521a53e'
@@ -19,34 +23,66 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('external_knowledge_apis',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=False),
-    sa.Column('description', sa.String(length=255), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('settings', sa.Text(), nullable=True),
-    sa.Column('created_by', models.types.StringUUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('external_knowledge_apis',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False),
+        sa.Column('description', sa.String(length=255), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('settings', sa.Text(), nullable=True),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
+        )
+    else:
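+        # models.types.LongText behaves like sa.Text on PostgreSQL and is
+        # presumably rendered as LONGTEXT on MySQL, where plain TEXT caps at 64 KB.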
+        op.create_table('external_knowledge_apis',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False),
+        sa.Column('description', sa.String(length=255), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('settings', models.types.LongText(), nullable=True),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
+        )
+
     with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op:
         batch_op.create_index('external_knowledge_apis_name_idx', ['name'], unique=False)
         batch_op.create_index('external_knowledge_apis_tenant_idx', ['tenant_id'], unique=False)

-    op.create_table('external_knowledge_bindings',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
-    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
-    sa.Column('external_knowledge_id', sa.Text(), nullable=False),
-    sa.Column('created_by', models.types.StringUUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('external_knowledge_bindings',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('external_knowledge_id', sa.Text(), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
+        )
+    else:
+        op.create_table('external_knowledge_bindings',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('external_knowledge_id', sa.String(length=512), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
+        )
+
     with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
         batch_op.create_index('external_knowledge_bindings_dataset_idx', ['dataset_id'], unique=False)
         batch_op.create_index('external_knowledge_bindings_external_knowledge_api_idx', ['external_knowledge_api_id'], unique=False)

diff --git a/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py b/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py
index 00f2b15802..33266ba5dd 100644
--- a/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py
+++ b/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py
@@ -16,6 +16,10 @@ branch_labels = None
 depends_on = None


+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
+
 def upgrade():
     def _has_name_or_size_column() -> bool:
         # We cannot access the database in offline mode, so assume
@@ -46,14 +50,26 @@ def upgrade():
     if _has_name_or_size_column():
         return

-    with op.batch_alter_table("tool_files", schema=None) as batch_op:
-        batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
-        batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
-    op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
-    op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
-    with op.batch_alter_table("tool_files", schema=None) as batch_op:
-        batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
-        batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
+    if _is_pg(conn):
+        # PostgreSQL: Keep original syntax
+        with op.batch_alter_table("tool_files", schema=None) as batch_op:
+            batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
+            batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
+        op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
+        op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
+        with op.batch_alter_table("tool_files", schema=None) as batch_op:
+            batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
+            batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
+    else:
+        # MySQL: Use compatible syntax
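+        # (MySQL, unlike PostgreSQL, requires an explicit VARCHAR length,
+        # hence sa.String(length=255) instead of the bare sa.String())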
+        with op.batch_alter_table("tool_files", schema=None) as batch_op:
+            batch_op.add_column(sa.Column("name", sa.String(length=255), nullable=True))
+            batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
+        op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
+        op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
+        with op.batch_alter_table("tool_files", schema=None) as batch_op:
+            batch_op.alter_column("name", existing_type=sa.String(length=255), nullable=False)
+            batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py b/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py
index 9daf148bc4..22ee0ec195 100644
--- a/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py
+++ b/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '43fa78bc3b7d'
 down_revision = '0251a1c768cc'
@@ -19,13 +23,25 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('whitelists',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
-    sa.Column('category', sa.String(length=255), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('whitelists',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
+        sa.Column('category', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
+        )
+    else:
+        op.create_table('whitelists',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
+        sa.Column('category', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
+        )
+
     with op.batch_alter_table('whitelists', schema=None) as batch_op:
         batch_op.create_index('whitelists_tenant_idx', ['tenant_id'], unique=False)

diff --git a/api/migrations/versions/2024_10_28_0720-08ec4f75af5e_add_tenant_plugin_permisisons.py b/api/migrations/versions/2024_10_28_0720-08ec4f75af5e_add_tenant_plugin_permisisons.py
index 51a0b1b211..666d046bb9 100644
--- a/api/migrations/versions/2024_10_28_0720-08ec4f75af5e_add_tenant_plugin_permisisons.py
+++ b/api/migrations/versions/2024_10_28_0720-08ec4f75af5e_add_tenant_plugin_permisisons.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '08ec4f75af5e'
 down_revision = 'ddcc8bbef391'
@@ -19,14 +23,26 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('account_plugin_permissions',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
-    sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
-    sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
-    sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('account_plugin_permissions',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
+        sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
+        sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
+        sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
+        )
+    else:
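+        # Identical to the PostgreSQL branch apart from dropping the
+        # uuid_generate_v4() server default, which MySQL does not provide.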
+        op.create_table('account_plugin_permissions',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
+        sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
+        sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
+        sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
+        )

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py b/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py
index 222379a490..b3fe1e9fab 100644
--- a/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py
+++ b/api/migrations/versions/2024_11_01_0540-f4d7ce70a7ca_update_upload_files_source_url.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = 'f4d7ce70a7ca'
 down_revision = '93ad8c19c40b'
@@ -19,23 +23,43 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('upload_files', schema=None) as batch_op:
-        batch_op.alter_column('source_url',
-               existing_type=sa.VARCHAR(length=255),
-               type_=sa.TEXT(),
-               existing_nullable=False,
-               existing_server_default=sa.text("''::character varying"))
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('upload_files', schema=None) as batch_op:
+            batch_op.alter_column('source_url',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=sa.TEXT(),
+                   existing_nullable=False,
+                   existing_server_default=sa.text("''::character varying"))
+    else:
+        with op.batch_alter_table('upload_files', schema=None) as batch_op:
+            batch_op.alter_column('source_url',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=models.types.LongText(),
+                   existing_nullable=False,
+                   existing_default=sa.text("''"))

     # ### end Alembic commands ###


 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('upload_files', schema=None) as batch_op:
-        batch_op.alter_column('source_url',
-               existing_type=sa.TEXT(),
-               type_=sa.VARCHAR(length=255),
-               existing_nullable=False,
-               existing_server_default=sa.text("''::character varying"))
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('upload_files', schema=None) as batch_op:
+            batch_op.alter_column('source_url',
+                   existing_type=sa.TEXT(),
+                   type_=sa.VARCHAR(length=255),
+                   existing_nullable=False,
+                   existing_server_default=sa.text("''::character varying"))
+    else:
+        with op.batch_alter_table('upload_files', schema=None) as batch_op:
+            batch_op.alter_column('source_url',
+                   existing_type=models.types.LongText(),
+                   type_=sa.VARCHAR(length=255),
+                   existing_nullable=False,
+                   existing_default=sa.text("''"))

     # ### end Alembic commands ###
diff --git a/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py b/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py
index 9a4ccf352d..45842295ea 100644
--- a/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py
+++ b/api/migrations/versions/2024_11_01_0622-d07474999927_update_type_of_custom_disclaimer_to_text.py
@@ -7,6 +7,9 @@ Create Date: 2024-11-01 06:22:27.981398
 """
 from alembic import op
 import models as models
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

@@ -19,49 +22,91 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
+    conn = op.get_bind()
+
     op.execute("UPDATE recommended_apps SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
     op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
     op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")

-    with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
-        batch_op.alter_column('custom_disclaimer',
-               existing_type=sa.VARCHAR(length=255),
-               type_=sa.TEXT(),
-               nullable=False)
+    if _is_pg(conn):
+        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=sa.TEXT(),
+                   nullable=False)

-    with op.batch_alter_table('sites', schema=None) as batch_op:
-        batch_op.alter_column('custom_disclaimer',
-               existing_type=sa.VARCHAR(length=255),
-               type_=sa.TEXT(),
-               nullable=False)
+        with op.batch_alter_table('sites', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=sa.TEXT(),
+                   nullable=False)

-    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
-        batch_op.alter_column('custom_disclaimer',
-               existing_type=sa.VARCHAR(length=255),
-               type_=sa.TEXT(),
-               nullable=False)
+        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=sa.TEXT(),
+                   nullable=False)
+    else:
+        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=models.types.LongText(),
+                   nullable=False)
+
+        with op.batch_alter_table('sites', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=models.types.LongText(),
+                   nullable=False)
+
+        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.VARCHAR(length=255),
+                   type_=models.types.LongText(),
+                   nullable=False)

     # ### end Alembic commands ###


 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
-        batch_op.alter_column('custom_disclaimer',
-               existing_type=sa.TEXT(),
-               type_=sa.VARCHAR(length=255),
-               nullable=True)
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.TEXT(),
+                   type_=sa.VARCHAR(length=255),
+                   nullable=True)

-    with op.batch_alter_table('sites', schema=None) as batch_op:
-        batch_op.alter_column('custom_disclaimer',
-               existing_type=sa.TEXT(),
-               type_=sa.VARCHAR(length=255),
-               nullable=True)
+        with op.batch_alter_table('sites', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.TEXT(),
+                   type_=sa.VARCHAR(length=255),
+                   nullable=True)

-    with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
-        batch_op.alter_column('custom_disclaimer',
-               existing_type=sa.TEXT(),
-               type_=sa.VARCHAR(length=255),
-               nullable=True)
+        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=sa.TEXT(),
+                   type_=sa.VARCHAR(length=255),
+                   nullable=True)
+    else:
+        with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=models.types.LongText(),
+                   type_=sa.VARCHAR(length=255),
+                   nullable=True)
+
+        with op.batch_alter_table('sites', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=models.types.LongText(),
+                   type_=sa.VARCHAR(length=255),
+                   nullable=True)
+
+        with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
+            batch_op.alter_column('custom_disclaimer',
+                   existing_type=models.types.LongText(),
+                   type_=sa.VARCHAR(length=255),
+                   nullable=True)

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py b/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py
index 117a7351cd..fdd8984029 100644
--- a/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py
+++ b/api/migrations/versions/2024_11_01_0623-09a8d1878d9b_update_workflows_graph_features_and_.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '09a8d1878d9b'
 down_revision = 'd07474999927'
@@ -19,55 +23,103 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('conversations', schema=None) as batch_op:
-        batch_op.alter_column('inputs',
-               existing_type=postgresql.JSON(astext_type=sa.Text()),
-               nullable=False)
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('conversations', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=postgresql.JSON(astext_type=sa.Text()),
+                   nullable=False)

-    with op.batch_alter_table('messages', schema=None) as batch_op:
-        batch_op.alter_column('inputs',
-               existing_type=postgresql.JSON(astext_type=sa.Text()),
-               nullable=False)
+        with op.batch_alter_table('messages', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=postgresql.JSON(astext_type=sa.Text()),
+                   nullable=False)
+    else:
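+        # The dialect-neutral sa.JSON resolves to MySQL's native JSON type;
+        # the postgresql.JSON construct would not compile on MySQL.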
+        with op.batch_alter_table('conversations', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=sa.JSON(),
+                   nullable=False)
+
+        with op.batch_alter_table('messages', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=sa.JSON(),
+                   nullable=False)

     op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL")
     op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL")
     op.execute("UPDATE workflows SET features = '' WHERE features IS NULL")
-
-    with op.batch_alter_table('workflows', schema=None) as batch_op:
-        batch_op.alter_column('graph',
-               existing_type=sa.TEXT(),
-               nullable=False)
-        batch_op.alter_column('features',
-               existing_type=sa.TEXT(),
-               nullable=False)
-        batch_op.alter_column('updated_at',
-               existing_type=postgresql.TIMESTAMP(),
-               nullable=False)
-
+    if _is_pg(conn):
+        with op.batch_alter_table('workflows', schema=None) as batch_op:
+            batch_op.alter_column('graph',
+                   existing_type=sa.TEXT(),
+                   nullable=False)
+            batch_op.alter_column('features',
+                   existing_type=sa.TEXT(),
+                   nullable=False)
+            batch_op.alter_column('updated_at',
+                   existing_type=postgresql.TIMESTAMP(),
+                   nullable=False)
+    else:
+        with op.batch_alter_table('workflows', schema=None) as batch_op:
+            batch_op.alter_column('graph',
+                   existing_type=models.types.LongText(),
+                   nullable=False)
+            batch_op.alter_column('features',
+                   existing_type=models.types.LongText(),
+                   nullable=False)
+            batch_op.alter_column('updated_at',
+                   existing_type=sa.TIMESTAMP(),
+                   nullable=False)

     # ### end Alembic commands ###


 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('workflows', schema=None) as batch_op:
-        batch_op.alter_column('updated_at',
-               existing_type=postgresql.TIMESTAMP(),
-               nullable=True)
-        batch_op.alter_column('features',
-               existing_type=sa.TEXT(),
-               nullable=True)
-        batch_op.alter_column('graph',
-               existing_type=sa.TEXT(),
-               nullable=True)
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('workflows', schema=None) as batch_op:
+            batch_op.alter_column('updated_at',
+                   existing_type=postgresql.TIMESTAMP(),
+                   nullable=True)
+            batch_op.alter_column('features',
+                   existing_type=sa.TEXT(),
+                   nullable=True)
+            batch_op.alter_column('graph',
+                   existing_type=sa.TEXT(),
+                   nullable=True)
+    else:
+        with op.batch_alter_table('workflows', schema=None) as batch_op:
+            batch_op.alter_column('updated_at',
+                   existing_type=sa.TIMESTAMP(),
+                   nullable=True)
+            batch_op.alter_column('features',
+                   existing_type=models.types.LongText(),
+                   nullable=True)
+            batch_op.alter_column('graph',
+                   existing_type=models.types.LongText(),
+                   nullable=True)

-    with op.batch_alter_table('messages', schema=None) as batch_op:
-        batch_op.alter_column('inputs',
-               existing_type=postgresql.JSON(astext_type=sa.Text()),
-               nullable=True)
+    if _is_pg(conn):
+        with op.batch_alter_table('messages', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=postgresql.JSON(astext_type=sa.Text()),
+                   nullable=True)

-    with op.batch_alter_table('conversations', schema=None) as batch_op:
-        batch_op.alter_column('inputs',
-               existing_type=postgresql.JSON(astext_type=sa.Text()),
-               nullable=True)
+        with op.batch_alter_table('conversations', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=postgresql.JSON(astext_type=sa.Text()),
+                   nullable=True)
+    else:
+        with op.batch_alter_table('messages', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=sa.JSON(),
+                   nullable=True)
+
+        with op.batch_alter_table('conversations', schema=None) as batch_op:
+            batch_op.alter_column('inputs',
+                   existing_type=sa.JSON(),
+                   nullable=True)

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_11_22_0701-e19037032219_parent_child_index.py b/api/migrations/versions/2024_11_22_0701-e19037032219_parent_child_index.py
index 9238e5a0a8..14048baa30 100644
--- a/api/migrations/versions/2024_11_22_0701-e19037032219_parent_child_index.py
+++ b/api/migrations/versions/2024_11_22_0701-e19037032219_parent_child_index.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa


+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
+
 # revision identifiers, used by Alembic.
 revision = 'e19037032219'
 down_revision = 'd7999dfa4aae'
@@ -19,27 +23,53 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('child_chunks',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
-    sa.Column('document_id', models.types.StringUUID(), nullable=False),
-    sa.Column('segment_id', models.types.StringUUID(), nullable=False),
-    sa.Column('position', sa.Integer(), nullable=False),
-    sa.Column('content', sa.Text(), nullable=False),
-    sa.Column('word_count', sa.Integer(), nullable=False),
-    sa.Column('index_node_id', sa.String(length=255), nullable=True),
-    sa.Column('index_node_hash', sa.String(length=255), nullable=True),
-    sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
-    sa.Column('created_by', models.types.StringUUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('indexing_at', sa.DateTime(), nullable=True),
-    sa.Column('completed_at', sa.DateTime(), nullable=True),
-    sa.Column('error', sa.Text(), nullable=True),
-    sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('child_chunks',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('document_id', models.types.StringUUID(), nullable=False),
+        sa.Column('segment_id', models.types.StringUUID(), nullable=False),
+        sa.Column('position', sa.Integer(), nullable=False),
+        sa.Column('content', sa.Text(), nullable=False),
+        sa.Column('word_count', sa.Integer(), nullable=False),
+        sa.Column('index_node_id', sa.String(length=255), nullable=True),
+        sa.Column('index_node_hash', sa.String(length=255), nullable=True),
+        sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.Column('indexing_at', sa.DateTime(), nullable=True),
+        sa.Column('completed_at', sa.DateTime(), nullable=True),
+        sa.Column('error', sa.Text(), nullable=True),
+        sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
+        )
+    else:
+        op.create_table('child_chunks',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('document_id', models.types.StringUUID(), nullable=False),
+        sa.Column('segment_id', models.types.StringUUID(), nullable=False),
+        sa.Column('position', sa.Integer(), nullable=False),
+        sa.Column('content', models.types.LongText(), nullable=False),
+        sa.Column('word_count', sa.Integer(), nullable=False),
+        sa.Column('index_node_id', sa.String(length=255), nullable=True),
+        sa.Column('index_node_hash', sa.String(length=255), nullable=True),
+        sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'"), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('indexing_at', sa.DateTime(), nullable=True),
+        sa.Column('completed_at', sa.DateTime(), nullable=True),
+        sa.Column('error', models.types.LongText(), nullable=True),
+        sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
+        )
+
     with op.batch_alter_table('child_chunks', schema=None) as batch_op:
         batch_op.create_index('child_chunk_dataset_id_idx', ['tenant_id', 'dataset_id', 'document_id', 'segment_id', 'index_node_id'], unique=False)

diff --git a/api/migrations/versions/2024_12_19_1746-11b07f66c737_remove_unused_tool_providers.py b/api/migrations/versions/2024_12_19_1746-11b07f66c737_remove_unused_tool_providers.py
index 881a9e3c1e..7be99fe09a 100644
--- a/api/migrations/versions/2024_12_19_1746-11b07f66c737_remove_unused_tool_providers.py
+++ b/api/migrations/versions/2024_12_19_1746-11b07f66c737_remove_unused_tool_providers.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '11b07f66c737'
 down_revision = 'cf8f4fc45278'
@@ -25,15 +29,30 @@ def upgrade():

 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('tool_providers',
-    sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
-    sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
-    sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
-    sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
-    sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
-    sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
-    sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
-    sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
-    sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('tool_providers',
+        sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
+        sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
+        sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
+        sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
+        sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
+        sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
+        sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
+        sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
+        sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
+        )
+    else:
+        op.create_table('tool_providers',
+        sa.Column('id', models.types.StringUUID(), autoincrement=False, nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), autoincrement=False, nullable=False),
+        sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
+        sa.Column('encrypted_credentials', models.types.LongText(), autoincrement=False, nullable=True),
+        sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
+        sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
+        sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
+        sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
+        sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
+        )

     # ### end Alembic commands ###

diff --git a/api/migrations/versions/2024_12_25_1137-923752d42eb6_add_auto_disabled_dataset_logs.py b/api/migrations/versions/2024_12_25_1137-923752d42eb6_add_auto_disabled_dataset_logs.py
index 6dadd4e4a8..750a3d02e2 100644
--- a/api/migrations/versions/2024_12_25_1137-923752d42eb6_add_auto_disabled_dataset_logs.py
+++ b/api/migrations/versions/2024_12_25_1137-923752d42eb6_add_auto_disabled_dataset_logs.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '923752d42eb6'
 down_revision = 'e19037032219'
@@ -19,15 +23,29 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('dataset_auto_disable_logs',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
-    sa.Column('document_id', models.types.StringUUID(), nullable=False),
-    sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('dataset_auto_disable_logs',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('document_id', models.types.StringUUID(), nullable=False),
+        sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
+        )
+    else:
+        op.create_table('dataset_auto_disable_logs',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('document_id', models.types.StringUUID(), nullable=False),
+        sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
+        )
+
     with op.batch_alter_table('dataset_auto_disable_logs', schema=None) as batch_op:
         batch_op.create_index('dataset_auto_disable_log_created_atx', ['created_at'], unique=False)
         batch_op.create_index('dataset_auto_disable_log_dataset_idx', ['dataset_id'], unique=False)

diff --git a/api/migrations/versions/2025_01_14_0617-f051706725cc_add_rate_limit_logs.py b/api/migrations/versions/2025_01_14_0617-f051706725cc_add_rate_limit_logs.py
index ef495be661..5d79877e28 100644
--- a/api/migrations/versions/2025_01_14_0617-f051706725cc_add_rate_limit_logs.py
+++ b/api/migrations/versions/2025_01_14_0617-f051706725cc_add_rate_limit_logs.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = 'f051706725cc'
 down_revision = 'ee79d9b1c156'
@@ -19,14 +23,27 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('rate_limit_logs',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('subscription_plan', sa.String(length=255), nullable=False),
-    sa.Column('operation', sa.String(length=255), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('rate_limit_logs',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('subscription_plan', sa.String(length=255), nullable=False),
+        sa.Column('operation', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
+        )
+    else:
+        op.create_table('rate_limit_logs',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('subscription_plan', sa.String(length=255), nullable=False),
+        sa.Column('operation', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
+        )
+
     with op.batch_alter_table('rate_limit_logs', schema=None) as batch_op:
         batch_op.create_index('rate_limit_log_operation_idx', ['operation'], unique=False)
         batch_op.create_index('rate_limit_log_tenant_idx', ['tenant_id'], unique=False)

diff --git a/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py b/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py
index 877e3a5eed..da512704a6 100644
--- a/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py
+++ b/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py
@@ -10,6 +10,10 @@ import models as models
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = 'd20049ed0af6'
 down_revision = 'f051706725cc'
@@ -19,34 +23,66 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('dataset_metadata_bindings',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
-    sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
-    sa.Column('document_id', models.types.StringUUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-    sa.Column('created_by', models.types.StringUUID(), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('dataset_metadata_bindings',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
+        sa.Column('document_id', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
+        )
+    else:
+        op.create_table('dataset_metadata_bindings',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
+        sa.Column('document_id', models.types.StringUUID(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
+        )
+
     with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op:
         batch_op.create_index('dataset_metadata_binding_dataset_idx', ['dataset_id'], unique=False)
         batch_op.create_index('dataset_metadata_binding_document_idx', ['document_id'], unique=False)
         batch_op.create_index('dataset_metadata_binding_metadata_idx', ['metadata_id'], unique=False)
         batch_op.create_index('dataset_metadata_binding_tenant_idx', ['tenant_id'], unique=False)

-    op.create_table('dataset_metadatas',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
-    sa.Column('type', sa.String(length=255), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('created_by', models.types.StringUUID(), nullable=False),
-    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
-    sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
-    )
+    if _is_pg(conn):
+        # PostgreSQL: Keep original syntax
+        op.create_table('dataset_metadatas',
+        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('type', sa.String(length=255), nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
+        )
+    else:
+        # MySQL: Use compatible syntax
+        op.create_table('dataset_metadatas',
+        sa.Column('id', models.types.StringUUID(), nullable=False),
+        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+        sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+        sa.Column('type', sa.String(length=255), nullable=False),
+        sa.Column('name', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+        sa.Column('created_by', models.types.StringUUID(), nullable=False),
+        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+        sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
+        )
+
     with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op:
         batch_op.create_index('dataset_metadata_dataset_idx', ['dataset_id'], unique=False)
         batch_op.create_index('dataset_metadata_tenant_idx', ['tenant_id'], unique=False)
@@ -54,23 +90,31 @@ def upgrade():
     with op.batch_alter_table('datasets', schema=None) as batch_op:
         batch_op.add_column(sa.Column('built_in_field_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False))

-    with op.batch_alter_table('documents', schema=None) as batch_op:
-        batch_op.alter_column('doc_metadata',
-               existing_type=postgresql.JSON(astext_type=sa.Text()),
-               type_=postgresql.JSONB(astext_type=sa.Text()),
-               existing_nullable=True)
-        batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
+    if _is_pg(conn):
+        with op.batch_alter_table('documents', schema=None) as batch_op:
+            batch_op.alter_column('doc_metadata',
+                   existing_type=postgresql.JSON(astext_type=sa.Text()),
+                   type_=postgresql.JSONB(astext_type=sa.Text()),
+                   existing_nullable=True)
+            batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
+    else:
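+        # MySQL has neither JSONB nor GIN indexes, so doc_metadata keeps the
+        # generic JSON type and the metadata index is simply not created.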
### - with op.batch_alter_table('documents', schema=None) as batch_op: - batch_op.drop_index('document_metadata_idx', postgresql_using='gin') - batch_op.alter_column('doc_metadata', - existing_type=postgresql.JSONB(astext_type=sa.Text()), - type_=postgresql.JSON(astext_type=sa.Text()), - existing_nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('documents', schema=None) as batch_op: + batch_op.drop_index('document_metadata_idx', postgresql_using='gin') + batch_op.alter_column('doc_metadata', + existing_type=postgresql.JSONB(astext_type=sa.Text()), + type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True) + else: + pass with op.batch_alter_table('datasets', schema=None) as batch_op: batch_op.drop_column('built_in_field_enabled') diff --git a/api/migrations/versions/2025_03_03_1436-ee79d9b1c156_add_marked_name_and_marked_comment_in_.py b/api/migrations/versions/2025_03_03_1436-ee79d9b1c156_add_marked_name_and_marked_comment_in_.py index 5189de40e4..ea1b24b0fa 100644 --- a/api/migrations/versions/2025_03_03_1436-ee79d9b1c156_add_marked_name_and_marked_comment_in_.py +++ b/api/migrations/versions/2025_03_03_1436-ee79d9b1c156_add_marked_name_and_marked_comment_in_.py @@ -17,10 +17,23 @@ branch_labels = None depends_on = None +def _is_pg(conn): + return conn.dialect.name == "postgresql" + + def upgrade(): - with op.batch_alter_table('workflows', schema=None) as batch_op: - batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default='')) - batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default='')) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default='')) + batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default='')) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('marked_name', sa.String(length=255), nullable=False, server_default='')) + batch_op.add_column(sa.Column('marked_comment', sa.String(length=255), nullable=False, server_default='')) def downgrade(): diff --git a/api/migrations/versions/2025_05_15_1531-2adcbe1f5dfb_add_workflowdraftvariable_model.py b/api/migrations/versions/2025_05_15_1531-2adcbe1f5dfb_add_workflowdraftvariable_model.py index 5bf394b21c..ef781b63c2 100644 --- a/api/migrations/versions/2025_05_15_1531-2adcbe1f5dfb_add_workflowdraftvariable_model.py +++ b/api/migrations/versions/2025_05_15_1531-2adcbe1f5dfb_add_workflowdraftvariable_model.py @@ -11,6 +11,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = "2adcbe1f5dfb" down_revision = "d28f2004b072" @@ -20,24 +24,46 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "workflow_draft_variables", - sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), - sa.Column("app_id", models.types.StringUUID(), nullable=False), - sa.Column("last_edited_at", sa.DateTime(), nullable=True), - sa.Column("node_id", sa.String(length=255), nullable=False), - sa.Column("name", sa.String(length=255), nullable=False), - sa.Column("description", sa.String(length=255), nullable=False), - sa.Column("selector", sa.String(length=255), nullable=False), - sa.Column("value_type", sa.String(length=20), nullable=False), - sa.Column("value", sa.Text(), nullable=False), - sa.Column("visible", sa.Boolean(), nullable=False), - sa.Column("editable", sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")), - sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")), - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table( + "workflow_draft_variables", + sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.Column("app_id", models.types.StringUUID(), nullable=False), + sa.Column("last_edited_at", sa.DateTime(), nullable=True), + sa.Column("node_id", sa.String(length=255), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("description", sa.String(length=255), nullable=False), + sa.Column("selector", sa.String(length=255), nullable=False), + sa.Column("value_type", sa.String(length=20), nullable=False), + sa.Column("value", sa.Text(), nullable=False), + sa.Column("visible", sa.Boolean(), nullable=False), + sa.Column("editable", sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")), + sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")), + ) + else: + op.create_table( + "workflow_draft_variables", + sa.Column("id", models.types.StringUUID(), nullable=False), + sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column("app_id", models.types.StringUUID(), nullable=False), + sa.Column("last_edited_at", sa.DateTime(), nullable=True), + sa.Column("node_id", sa.String(length=255), nullable=False), + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("description", sa.String(length=255), nullable=False), + sa.Column("selector", sa.String(length=255), nullable=False), + sa.Column("value_type", sa.String(length=20), nullable=False), + sa.Column("value", models.types.LongText(), nullable=False), + sa.Column("visible", sa.Boolean(), nullable=False), + sa.Column("editable", sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")), + sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")), + ) # ### end Alembic commands ### diff --git 
a/api/migrations/versions/2025_06_06_1424-4474872b0ee6_workflow_draft_varaibles_add_node_execution_id.py b/api/migrations/versions/2025_06_06_1424-4474872b0ee6_workflow_draft_varaibles_add_node_execution_id.py index d7a5d116c9..610064320a 100644 --- a/api/migrations/versions/2025_06_06_1424-4474872b0ee6_workflow_draft_varaibles_add_node_execution_id.py +++ b/api/migrations/versions/2025_06_06_1424-4474872b0ee6_workflow_draft_varaibles_add_node_execution_id.py @@ -7,6 +7,10 @@ Create Date: 2025-06-06 14:24:44.213018 """ from alembic import op import models as models import sqlalchemy as sa + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" @@ -18,19 +22,30 @@ depends_on = None def upgrade(): - # `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block` - # context manager to wrap the index creation statement. - # Reference: - # - # - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot. - # - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block - with op.get_context().autocommit_block(): + # ### commands auto generated by Alembic - please adjust! ### + conn = op.get_bind() + + if _is_pg(conn): + # `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block` + # context manager to wrap the index creation statement. + # Reference: + # + # - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot. + # - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block + with op.get_context().autocommit_block(): + op.create_index( + op.f('workflow_node_executions_tenant_id_idx'), + "workflow_node_executions", + ['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')], + unique=False, + postgresql_concurrently=True, + ) + else: op.create_index( op.f('workflow_node_executions_tenant_id_idx'), "workflow_node_executions", ['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')], unique=False, - postgresql_concurrently=True, ) with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op: @@ -51,8 +66,13 @@ def downgrade(): # Reference: # # https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
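Taken together, the two branches above reduce to a reusable shape: PostgreSQL gets a non-blocking CREATE INDEX CONCURRENTLY inside an autocommit block, while every other dialect gets a plain, transactional CREATE INDEX. A minimal sketch of that pattern, with an illustrative table and index name that are not taken from this patch:

import sqlalchemy as sa
from alembic import op


def _create_index_for_dialect() -> None:
    """Illustrative helper: build the same index on either backend."""
    conn = op.get_bind()
    if conn.dialect.name == "postgresql":
        # CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
        # hence the autocommit_block() wrapper.
        with op.get_context().autocommit_block():
            op.create_index(
                "example_tenant_idx",
                "example_table",
                ["tenant_id", sa.literal_column("created_at DESC")],
                unique=False,
                postgresql_concurrently=True,
            )
    else:
        # MySQL and friends: a regular (locking) index build.
        op.create_index(
            "example_tenant_idx",
            "example_table",
            ["tenant_id", sa.literal_column("created_at DESC")],
            unique=False,
        )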
- with op.get_context().autocommit_block(): - op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.get_context().autocommit_block(): + op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True) + else: + op.drop_index(op.f('workflow_node_executions_tenant_id_idx')) with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op: batch_op.drop_column('node_execution_id') diff --git a/api/migrations/versions/2025_06_25_0936-58eb7bdb93fe_add_mcp_server_tool_and_app_server.py b/api/migrations/versions/2025_06_25_0936-58eb7bdb93fe_add_mcp_server_tool_and_app_server.py index 0548bf05ef..83a7d1814c 100644 --- a/api/migrations/versions/2025_06_25_0936-58eb7bdb93fe_add_mcp_server_tool_and_app_server.py +++ b/api/migrations/versions/2025_06_25_0936-58eb7bdb93fe_add_mcp_server_tool_and_app_server.py @@ -10,6 +10,10 @@ import models as models import sqlalchemy as sa +def _is_pg(conn): + return conn.dialect.name == "postgresql" + + # revision identifiers, used by Alembic. revision = '58eb7bdb93fe' down_revision = '0ab65e1cc7fa' @@ -19,40 +23,80 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('app_mcp_servers', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.String(length=255), nullable=False), - sa.Column('server_code', sa.String(length=255), nullable=False), - sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False), - sa.Column('parameters', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'), - sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'), - sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code') - ) - op.create_table('tool_mcp_providers', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('name', sa.String(length=40), nullable=False), - sa.Column('server_identifier', sa.String(length=24), nullable=False), - sa.Column('server_url', sa.Text(), nullable=False), - sa.Column('server_url_hash', sa.String(length=64), nullable=False), - sa.Column('icon', sa.String(length=255), nullable=True), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('user_id', models.types.StringUUID(), nullable=False), - sa.Column('encrypted_credentials', sa.Text(), nullable=True), - sa.Column('authed', sa.Boolean(), nullable=False), - sa.Column('tools', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'), - sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'), - sa.UniqueConstraint('tenant_id', 'server_identifier', 
name='unique_mcp_provider_server_identifier'), - sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('app_mcp_servers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=255), nullable=False), + sa.Column('server_code', sa.String(length=255), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False), + sa.Column('parameters', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'), + sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'), + sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code') + ) + else: + op.create_table('app_mcp_servers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=255), nullable=False), + sa.Column('server_code', sa.String(length=255), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False), + sa.Column('parameters', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'), + sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'), + sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code') + ) + if _is_pg(conn): + op.create_table('tool_mcp_providers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('name', sa.String(length=40), nullable=False), + sa.Column('server_identifier', sa.String(length=24), nullable=False), + sa.Column('server_url', sa.Text(), nullable=False), + sa.Column('server_url_hash', sa.String(length=64), nullable=False), + sa.Column('icon', sa.String(length=255), nullable=True), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('encrypted_credentials', sa.Text(), nullable=True), + sa.Column('authed', sa.Boolean(), nullable=False), + sa.Column('tools', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'), + sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'), + 
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url') + ) + else: + op.create_table('tool_mcp_providers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=40), nullable=False), + sa.Column('server_identifier', sa.String(length=24), nullable=False), + sa.Column('server_url', models.types.LongText(), nullable=False), + sa.Column('server_url_hash', sa.String(length=64), nullable=False), + sa.Column('icon', sa.String(length=255), nullable=True), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('encrypted_credentials', models.types.LongText(), nullable=True), + sa.Column('authed', sa.Boolean(), nullable=False), + sa.Column('tools', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'), + sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'), + sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_07_02_2332-1c9ba48be8e4_add_uuidv7_function_in_sql.py b/api/migrations/versions/2025_07_02_2332-1c9ba48be8e4_add_uuidv7_function_in_sql.py index 2bbbb3d28e..1aa92b7d50 100644 --- a/api/migrations/versions/2025_07_02_2332-1c9ba48be8e4_add_uuidv7_function_in_sql.py +++ b/api/migrations/versions/2025_07_02_2332-1c9ba48be8e4_add_uuidv7_function_in_sql.py @@ -27,6 +27,10 @@ import models as models import sqlalchemy as sa +def _is_pg(conn): + return conn.dialect.name == "postgresql" + + # revision identifiers, used by Alembic. revision = '1c9ba48be8e4' down_revision = '58eb7bdb93fe' @@ -40,7 +44,11 @@ def upgrade(): # The ability to specify source timestamp has been removed because its type signature is incompatible with # PostgreSQL 18's `uuidv7` function. This capability is rarely needed in practice, as IDs can be # generated and controlled within the application layer. - op.execute(sa.text(r""" + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Create uuidv7 functions + op.execute(sa.text(r""" /* Main function to generate a uuidv7 value with millisecond precision */ CREATE FUNCTION uuidv7() RETURNS uuid AS @@ -63,7 +71,7 @@ COMMENT ON FUNCTION uuidv7 IS 'Generate a uuid-v7 value with a 48-bit timestamp (millisecond precision) and 74 bits of randomness'; """)) - op.execute(sa.text(r""" + op.execute(sa.text(r""" CREATE FUNCTION uuidv7_boundary(timestamptz) RETURNS uuid AS $$ @@ -79,8 +87,15 @@ COMMENT ON FUNCTION uuidv7_boundary(timestamptz) IS 'Generate a non-random uuidv7 with the given timestamp (first 48 bits) and all random bits to 0. 
As the smallest possible uuidv7 for that timestamp, it may be used as a boundary for partitions.'; """ )) + else: + # MySQL: the uuidv7()/uuidv7_boundary() SQL functions are not created; IDs are generated in the application layer instead. + pass def downgrade(): - op.execute(sa.text("DROP FUNCTION uuidv7")) - op.execute(sa.text("DROP FUNCTION uuidv7_boundary")) + conn = op.get_bind() + + if _is_pg(conn): + op.execute(sa.text("DROP FUNCTION uuidv7")) + op.execute(sa.text("DROP FUNCTION uuidv7_boundary")) + else: + # MySQL: nothing to drop; the functions were never created. + pass diff --git a/api/migrations/versions/2025_07_04_1705-71f5020c6470_tool_oauth.py b/api/migrations/versions/2025_07_04_1705-71f5020c6470_tool_oauth.py index df4fbf0a0e..e22af7cb8a 100644 --- a/api/migrations/versions/2025_07_04_1705-71f5020c6470_tool_oauth.py +++ b/api/migrations/versions/2025_07_04_1705-71f5020c6470_tool_oauth.py @@ -10,6 +10,10 @@ import models as models import sqlalchemy as sa +def _is_pg(conn): + return conn.dialect.name == "postgresql" + + # revision identifiers, used by Alembic. revision = '71f5020c6470' down_revision = '1c9ba48be8e4' @@ -19,31 +23,63 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('tool_oauth_system_clients', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('plugin_id', sa.String(length=512), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'), - sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx') - ) - op.create_table('tool_oauth_tenant_clients', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('plugin_id', sa.String(length=512), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'), - sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tool_oauth_system_clients', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx') + ) + else: + op.create_table('tool_oauth_system_clients', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx') + ) + if _is_pg(conn): + op.create_table('tool_oauth_tenant_clients', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), +
sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client') + ) + else: + op.create_table('tool_oauth_tenant_clients', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + # 255 here (vs. 512 on PostgreSQL) keeps the (tenant_id, plugin_id, provider) unique key within MySQL's 3072-byte index limit. + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client') + ) - with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False)) - batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False)) - batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False)) - batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique') - batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name']) + if _is_pg(conn): + with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False)) + batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False)) + batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False)) + batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique') + batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name']) + else: + with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'"), nullable=False)) + batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False)) + batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'"), nullable=False)) + batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique') + batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name']) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py b/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py index 4ff0402a97..48b6ceb145 100644 ---
a/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py +++ b/api/migrations/versions/2025_07_23_1508-8bcc02c9bd07_add_tenant_plugin_autoupgrade_table.py @@ -10,6 +10,10 @@ import models as models import sqlalchemy as sa from sqlalchemy.dialects import postgresql + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '8bcc02c9bd07' down_revision = '375fe79ead14' @@ -19,19 +23,36 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('tenant_plugin_auto_upgrade_strategies', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False), - sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False), - sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False), - sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False), - sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'), - sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tenant_plugin_auto_upgrade_strategies', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False), + sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False), + sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False), + sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False), + sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'), + sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy') + ) + else: + op.create_table('tenant_plugin_auto_upgrade_strategies', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False), + sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False), + sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False), + sa.Column('exclude_plugins', sa.JSON(), nullable=False), + sa.Column('include_plugins', sa.JSON(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'), + 
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_07_24_1450-532b3f888abf_manual_dataset_field_update.py b/api/migrations/versions/2025_07_24_1450-532b3f888abf_manual_dataset_field_update.py index 1664fb99c4..2597067e81 100644 --- a/api/migrations/versions/2025_07_24_1450-532b3f888abf_manual_dataset_field_update.py +++ b/api/migrations/versions/2025_07_24_1450-532b3f888abf_manual_dataset_field_update.py @@ -7,6 +7,10 @@ Create Date: 2025-07-24 14:50:48.779833 """ from alembic import op import models as models import sqlalchemy as sa + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" @@ -18,8 +22,18 @@ depends_on = None def upgrade(): - op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying") + conn = op.get_bind() + + if _is_pg(conn): + op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying") + else: + op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'") def downgrade(): - op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'") + conn = op.get_bind() + + if _is_pg(conn): + op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying") + else: + op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'") diff --git a/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py b/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py index da8b1aa796..18e1b8d601 100644 --- a/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py +++ b/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py @@ -11,6 +11,10 @@ import models as models import sqlalchemy as sa from sqlalchemy.sql import table, column + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic.
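The exclude_plugins/include_plugins columns of tenant_plugin_auto_upgrade_strategies above illustrate the broader data-structure mapping in this patch: PostgreSQL keeps native arrays, while MySQL, which has no ARRAY type, falls back to JSON. A minimal sketch of that choice; the helper and column names below are illustrative and not part of this patch:

import sqlalchemy as sa


def plugin_list_column(name: str, dialect_name: str) -> sa.Column:
    """Illustrative: declare a list-valued column for either backend."""
    if dialect_name == "postgresql":
        # Native text[] array on PostgreSQL.
        return sa.Column(name, sa.ARRAY(sa.String(length=255)), nullable=False)
    # MySQL stores the same Python list as a JSON document.
    return sa.Column(name, sa.JSON(), nullable=False)

Either way SQLAlchemy round-trips a Python list, so application code reading these columns does not need its own dialect branch.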
revision = 'e8446f481c1e' down_revision = 'fa8b0fa6f407' @@ -20,16 +24,30 @@ depends_on = None def upgrade(): # Create provider_credentials table - op.create_table('provider_credentials', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('provider_name', sa.String(length=255), nullable=False), - sa.Column('credential_name', sa.String(length=255), nullable=False), - sa.Column('encrypted_config', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='provider_credential_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('provider_credentials', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('credential_name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_credential_pkey') + ) + else: + op.create_table('provider_credentials', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('credential_name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_credential_pkey') + ) # Create index for provider_credentials with op.batch_alter_table('provider_credentials', schema=None) as batch_op: @@ -60,27 +78,49 @@ def upgrade(): def migrate_existing_providers_data(): """migrate providers table data to provider_credentials""" - + conn = op.get_bind() # Define table structure for data manipulation - providers_table = table('providers', - column('id', models.types.StringUUID()), - column('tenant_id', models.types.StringUUID()), - column('provider_name', sa.String()), - column('encrypted_config', sa.Text()), - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()), - column('credential_id', models.types.StringUUID()), - ) + if _is_pg(conn): + providers_table = table('providers', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()), + column('credential_id', models.types.StringUUID()), + ) + else: + providers_table = table('providers', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('encrypted_config', models.types.LongText()), + column('created_at', sa.DateTime()), + 
column('updated_at', sa.DateTime()), + column('credential_id', models.types.StringUUID()), + ) - provider_credential_table = table('provider_credentials', - column('id', models.types.StringUUID()), - column('tenant_id', models.types.StringUUID()), - column('provider_name', sa.String()), - column('credential_name', sa.String()), - column('encrypted_config', sa.Text()), - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()) - ) + if _is_pg(conn): + provider_credential_table = table('provider_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) + else: + provider_credential_table = table('provider_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', models.types.LongText()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) # Get database connection conn = op.get_bind() @@ -123,8 +163,14 @@ def migrate_existing_providers_data(): def downgrade(): # Re-add encrypted_config column to providers table - with op.batch_alter_table('providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True)) # Migrate data back from provider_credentials to providers diff --git a/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py b/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py index f03a215505..16ca902726 100644 --- a/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py +++ b/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py @@ -13,6 +13,10 @@ import sqlalchemy as sa from sqlalchemy.sql import table, column +def _is_pg(conn): + return conn.dialect.name == "postgresql" + + # revision identifiers, used by Alembic. 
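The migrate_existing_providers_data helper above uses SQLAlchemy's lightweight table()/column() constructs so the migration can run DML without importing live ORM models, which keeps it stable as the models evolve. A condensed sketch of that copy pattern; the column set, filter, and credential name below are illustrative, and the actual copy logic sits outside the hunks shown:

import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import column, table


def _copy_provider_credentials() -> None:
    """Illustrative: copy rows between tables inside a migration."""
    conn = op.get_bind()
    # Just enough schema for SELECT/INSERT; no model imports needed.
    src = table(
        "providers",
        column("id", sa.String()),
        column("tenant_id", sa.String()),
        column("provider_name", sa.String()),
        column("encrypted_config", sa.Text()),
    )
    dst = table(
        "provider_credentials",
        column("id", sa.String()),
        column("tenant_id", sa.String()),
        column("provider_name", sa.String()),
        column("credential_name", sa.String()),
        column("encrypted_config", sa.Text()),
    )
    rows = conn.execute(
        sa.select(src.c.id, src.c.tenant_id, src.c.provider_name, src.c.encrypted_config).where(
            src.c.encrypted_config.is_not(None)
        )
    ).fetchall()
    if rows:
        conn.execute(
            dst.insert(),
            [
                {
                    "id": row.id,
                    "tenant_id": row.tenant_id,
                    "provider_name": row.provider_name,
                    "credential_name": "Default",  # illustrative placeholder
                    "encrypted_config": row.encrypted_config,
                }
                for row in rows
            ],
        )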
revision = '0e154742a5fa' down_revision = 'e8446f481c1e' @@ -22,18 +26,34 @@ depends_on = None def upgrade(): # Create provider_model_credentials table - op.create_table('provider_model_credentials', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('provider_name', sa.String(length=255), nullable=False), - sa.Column('model_name', sa.String(length=255), nullable=False), - sa.Column('model_type', sa.String(length=40), nullable=False), - sa.Column('credential_name', sa.String(length=255), nullable=False), - sa.Column('encrypted_config', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('provider_model_credentials', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('credential_name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey') + ) + else: + op.create_table('provider_model_credentials', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('credential_name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey') + ) # Create index for provider_model_credentials with op.batch_alter_table('provider_model_credentials', schema=None) as batch_op: @@ -66,31 +86,57 @@ def upgrade(): def migrate_existing_provider_models_data(): """migrate provider_models table data to provider_model_credentials""" - + conn = op.get_bind() # Define table structure for data manipulation - provider_models_table = table('provider_models', - column('id', models.types.StringUUID()), - column('tenant_id', models.types.StringUUID()), - column('provider_name', sa.String()), - column('model_name', sa.String()), - column('model_type', sa.String()), - column('encrypted_config', sa.Text()), - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()), - column('credential_id', models.types.StringUUID()), - ) + if _is_pg(conn): + provider_models_table = table('provider_models', + column('id', 
models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()), + column('credential_id', models.types.StringUUID()), + ) + else: + provider_models_table = table('provider_models', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('encrypted_config', models.types.LongText()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()), + column('credential_id', models.types.StringUUID()), + ) - provider_model_credentials_table = table('provider_model_credentials', - column('id', models.types.StringUUID()), - column('tenant_id', models.types.StringUUID()), - column('provider_name', sa.String()), - column('model_name', sa.String()), - column('model_type', sa.String()), - column('credential_name', sa.String()), - column('encrypted_config', sa.Text()), - column('created_at', sa.DateTime()), - column('updated_at', sa.DateTime()) - ) + if _is_pg(conn): + provider_model_credentials_table = table('provider_model_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) + else: + provider_model_credentials_table = table('provider_model_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', models.types.LongText()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) # Get database connection @@ -137,8 +183,14 @@ def migrate_existing_provider_models_data(): def downgrade(): # Re-add encrypted_config column to provider_models table - with op.batch_alter_table('provider_models', schema=None) as batch_op: - batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True)) if not context.is_offline_mode(): # Migrate data back from provider_model_credentials to provider_models diff --git a/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py b/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py index 3a3186bcbc..75b4d61173 100644 --- a/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py +++ b/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py @@ -8,6 +8,11 @@ Create Date: 2025-08-20 17:47:17.015695 from alembic import op import models as models import sqlalchemy as sa +from libs.uuid_utils import uuidv7 + + +def 
_is_pg(conn): + return conn.dialect.name == "postgresql" # revision identifiers, used by Alembic. @@ -19,17 +24,33 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('oauth_provider_apps', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('app_icon', sa.String(length=255), nullable=False), - sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False), - sa.Column('client_id', sa.String(length=255), nullable=False), - sa.Column('client_secret', sa.String(length=255), nullable=False), - sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False), - sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('oauth_provider_apps', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('app_icon', sa.String(length=255), nullable=False), + sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False), + sa.Column('client_id', sa.String(length=255), nullable=False), + sa.Column('client_secret', sa.String(length=255), nullable=False), + sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False), + sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey') + ) + else: + op.create_table('oauth_provider_apps', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_icon', sa.String(length=255), nullable=False), + # MySQL rejects a plain literal DEFAULT on JSON columns, so these defaults are applied client-side via default= instead of server_default=. + sa.Column('app_label', sa.JSON(), default='{}', nullable=False), + sa.Column('client_id', sa.String(length=255), nullable=False), + sa.Column('client_secret', sa.String(length=255), nullable=False), + sa.Column('redirect_uris', sa.JSON(), default='[]', nullable=False), + sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey') + ) + with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op: batch_op.create_index('oauth_provider_app_client_id_idx', ['client_id'], unique=False) diff --git a/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py b/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py index 99d47478f3..4f472fe4b4 100644 --- a/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py +++ b/api/migrations/versions/2025_09_08_1007-c20211f18133_add_headers_to_mcp_provider.py @@ -7,6 +7,10 @@ Create Date: 2025-08-29 10:07:54.163626 """ from alembic import op import models as models import sqlalchemy as sa + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" @@ -19,7 +23,12 @@ depends_on = None def upgrade(): # Add encrypted_headers column to tool_mcp_providers table
- op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True)) + else: + op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True)) def downgrade(): diff --git a/api/migrations/versions/2025_09_11_1537-cf7c38a32b2d_add_credential_status_for_provider_table.py b/api/migrations/versions/2025_09_11_1537-cf7c38a32b2d_add_credential_status_for_provider_table.py index 17467e6495..4f78f346f4 100644 --- a/api/migrations/versions/2025_09_11_1537-cf7c38a32b2d_add_credential_status_for_provider_table.py +++ b/api/migrations/versions/2025_09_11_1537-cf7c38a32b2d_add_credential_status_for_provider_table.py @@ -7,6 +7,9 @@ Create Date: 2025-09-11 15:37:17.771298 """ from alembic import op import models as models import sqlalchemy as sa + +def _is_pg(conn): + return conn.dialect.name == "postgresql" @@ -19,8 +22,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True)) + else: + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'"), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py index 53a95141ec..8eac0dee10 100644 --- a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py +++ b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py @@ -9,6 +9,11 @@ from alembic import op import models as models import sqlalchemy as sa from sqlalchemy.dialects import postgresql +from libs.uuid_utils import uuidv7 + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" # revision identifiers, used by Alembic. revision = '68519ad5cd18' @@ -19,152 +24,314 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust!
### - op.create_table('datasource_oauth_params', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('plugin_id', sa.String(length=255), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'), - sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx') - ) - op.create_table('datasource_oauth_tenant_params', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('plugin_id', sa.String(length=255), nullable=False), - sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('enabled', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'), - sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique') - ) - op.create_table('datasource_providers', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('plugin_id', sa.String(length=255), nullable=False), - sa.Column('auth_type', sa.String(length=255), nullable=False), - sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('avatar_url', sa.Text(), nullable=True), - sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'), - sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('datasource_oauth_params', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx') + ) + else: + op.create_table('datasource_oauth_params', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('system_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False), + 
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx') + ) + if _is_pg(conn): + op.create_table('datasource_oauth_tenant_params', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique') + ) + else: + op.create_table('datasource_oauth_tenant_params', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('client_params', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique') + ) + if _is_pg(conn): + op.create_table('datasource_providers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('auth_type', sa.String(length=255), nullable=False), + sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('avatar_url', sa.Text(), nullable=True), + sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name') + ) + else: + op.create_table('datasource_providers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=128), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('auth_type', sa.String(length=255), 
nullable=False), + sa.Column('encrypted_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False), + sa.Column('avatar_url', models.types.LongText(), nullable=True), + sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name') + ) with op.batch_alter_table('datasource_providers', schema=None) as batch_op: batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False) - op.create_table('document_pipeline_execution_logs', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('pipeline_id', models.types.StringUUID(), nullable=False), - sa.Column('document_id', models.types.StringUUID(), nullable=False), - sa.Column('datasource_type', sa.String(length=255), nullable=False), - sa.Column('datasource_info', sa.Text(), nullable=False), - sa.Column('datasource_node_id', sa.String(length=255), nullable=False), - sa.Column('input_data', sa.JSON(), nullable=False), - sa.Column('created_by', models.types.StringUUID(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey') - ) + if _is_pg(conn): + op.create_table('document_pipeline_execution_logs', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('pipeline_id', models.types.StringUUID(), nullable=False), + sa.Column('document_id', models.types.StringUUID(), nullable=False), + sa.Column('datasource_type', sa.String(length=255), nullable=False), + sa.Column('datasource_info', sa.Text(), nullable=False), + sa.Column('datasource_node_id', sa.String(length=255), nullable=False), + sa.Column('input_data', sa.JSON(), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey') + ) + else: + op.create_table('document_pipeline_execution_logs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('pipeline_id', models.types.StringUUID(), nullable=False), + sa.Column('document_id', models.types.StringUUID(), nullable=False), + sa.Column('datasource_type', sa.String(length=255), nullable=False), + sa.Column('datasource_info', models.types.LongText(), nullable=False), + sa.Column('datasource_node_id', sa.String(length=255), nullable=False), + sa.Column('input_data', sa.JSON(), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey') + ) with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op: batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False) - 
op.create_table('pipeline_built_in_templates', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=False), - sa.Column('chunk_structure', sa.String(length=255), nullable=False), - sa.Column('icon', sa.JSON(), nullable=False), - sa.Column('yaml_content', sa.Text(), nullable=False), - sa.Column('copyright', sa.String(length=255), nullable=False), - sa.Column('privacy_policy', sa.String(length=255), nullable=False), - sa.Column('position', sa.Integer(), nullable=False), - sa.Column('install_count', sa.Integer(), nullable=False), - sa.Column('language', sa.String(length=255), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('created_by', models.types.StringUUID(), nullable=False), - sa.Column('updated_by', models.types.StringUUID(), nullable=True), - sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey') - ) - op.create_table('pipeline_customized_templates', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=False), - sa.Column('chunk_structure', sa.String(length=255), nullable=False), - sa.Column('icon', sa.JSON(), nullable=False), - sa.Column('position', sa.Integer(), nullable=False), - sa.Column('yaml_content', sa.Text(), nullable=False), - sa.Column('install_count', sa.Integer(), nullable=False), - sa.Column('language', sa.String(length=255), nullable=False), - sa.Column('created_by', models.types.StringUUID(), nullable=False), - sa.Column('updated_by', models.types.StringUUID(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey') - ) + if _is_pg(conn): + op.create_table('pipeline_built_in_templates', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('chunk_structure', sa.String(length=255), nullable=False), + sa.Column('icon', sa.JSON(), nullable=False), + sa.Column('yaml_content', sa.Text(), nullable=False), + sa.Column('copyright', sa.String(length=255), nullable=False), + sa.Column('privacy_policy', sa.String(length=255), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('install_count', sa.Integer(), nullable=False), + sa.Column('language', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey') + ) + else: + op.create_table('pipeline_built_in_templates', + sa.Column('id', 
models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', models.types.LongText(), nullable=False), + sa.Column('chunk_structure', sa.String(length=255), nullable=False), + sa.Column('icon', sa.JSON(), nullable=False), + sa.Column('yaml_content', models.types.LongText(), nullable=False), + sa.Column('copyright', sa.String(length=255), nullable=False), + sa.Column('privacy_policy', sa.String(length=255), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('install_count', sa.Integer(), nullable=False), + sa.Column('language', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey') + ) + if _is_pg(conn): + op.create_table('pipeline_customized_templates', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('chunk_structure', sa.String(length=255), nullable=False), + sa.Column('icon', sa.JSON(), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('yaml_content', sa.Text(), nullable=False), + sa.Column('install_count', sa.Integer(), nullable=False), + sa.Column('language', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey') + ) + else: + # MySQL: Use compatible syntax + op.create_table('pipeline_customized_templates', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', models.types.LongText(), nullable=False), + sa.Column('chunk_structure', sa.String(length=255), nullable=False), + sa.Column('icon', sa.JSON(), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('yaml_content', models.types.LongText(), nullable=False), + sa.Column('install_count', sa.Integer(), nullable=False), + sa.Column('language', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey') + ) with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op: batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False) - 
op.create_table('pipeline_recommended_plugins', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('plugin_id', sa.Text(), nullable=False), - sa.Column('provider_name', sa.Text(), nullable=False), - sa.Column('position', sa.Integer(), nullable=False), - sa.Column('active', sa.Boolean(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey') - ) - op.create_table('pipelines', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False), - sa.Column('workflow_id', models.types.StringUUID(), nullable=True), - sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('created_by', models.types.StringUUID(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_by', models.types.StringUUID(), nullable=True), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='pipeline_pkey') - ) - op.create_table('workflow_draft_variable_files', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'), - sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'), - sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner to of the WorkflowDraftVariableFile, referencing Account.id'), - sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'), - sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'), - sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. 
For other types, the value is NULL.'), - sa.Column('value_type', sa.String(20), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey')) - ) - op.create_table('workflow_node_execution_offload', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('node_execution_id', models.types.StringUUID(), nullable=True), - sa.Column('type', sa.String(20), nullable=False), - sa.Column('file_id', models.types.StringUUID(), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')), - sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key')) - ) - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True)) - batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) - batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True)) - batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True)) - batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True)) - batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False)) + if _is_pg(conn): + op.create_table('pipeline_recommended_plugins', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('plugin_id', sa.Text(), nullable=False), + sa.Column('provider_name', sa.Text(), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('active', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey') + ) + else: + op.create_table('pipeline_recommended_plugins', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', models.types.LongText(), nullable=False), + sa.Column('provider_name', models.types.LongText(), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('active', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey') + ) + if _is_pg(conn): + op.create_table('pipelines', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=True), + sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False), + 
sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='pipeline_pkey') + ) + else: + op.create_table('pipelines', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', models.types.LongText(), default=sa.text("''"), nullable=False),  # MySQL: TEXT columns cannot carry a literal server default + sa.Column('workflow_id', models.types.StringUUID(), nullable=True), + sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='pipeline_pkey') + ) + if _is_pg(conn): + op.create_table('workflow_draft_variable_files', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'), + sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'), + sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'), + sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'), + sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'), + sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys.
For other types, the value is NULL.'), + sa.Column('value_type', sa.String(20), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey')) + ) + else: + op.create_table('workflow_draft_variable_files', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'), + sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'), + sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'), + sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'), + sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'), + sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'), + sa.Column('value_type', sa.String(20), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey')) + ) + if _is_pg(conn): + op.create_table('workflow_node_execution_offload', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_execution_id', models.types.StringUUID(), nullable=True), + sa.Column('type', sa.String(20), nullable=False), + sa.Column('file_id', models.types.StringUUID(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')), + sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key')) + ) + else: + op.create_table('workflow_node_execution_offload', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_execution_id', models.types.StringUUID(), nullable=True), + sa.Column('type', sa.String(20), nullable=False), + sa.Column('file_id', models.types.StringUUID(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')), + sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key')) + ) + if _is_pg(conn): + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True)) + batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) + batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True)) + 
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True)) + batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True)) + batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False)) + else: + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True)) + batch_op.add_column(sa.Column('icon_info', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True)) + batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'"), nullable=True)) + batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True)) + batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True)) + batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False)) with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op: batch_op.add_column(sa.Column('file_id', models.types.StringUUID(), nullable=True, comment='Reference to WorkflowDraftVariableFile if variable is offloaded to external storage')) @@ -175,9 +342,12 @@ def upgrade(): comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',) ) batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False) - - with op.batch_alter_table('workflows', schema=None) as batch_op: - batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False)) + if _is_pg(conn): + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False)) + else: + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('rag_pipeline_variables', models.types.LongText(), default='{}', nullable=False))  # MySQL: TEXT columns cannot carry a literal server default # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py b/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py index 086a02e7c3..0776ab0818 100644 --- a/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py +++ b/api/migrations/versions/2025_10_21_1430-ae662b25d9bc_remove_builtin_template_user.py @@ -7,6 +7,10 @@ Create Date: 2025-10-21 14:30:28.566192 """ from alembic import op import models as models + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + import sqlalchemy as sa @@ -29,8 +33,15 @@ def downgrade(): # ### commands auto generated by Alembic - please adjust!
### - with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False)) - batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False)) + batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True)) + else: + with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False)) + batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py b/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py index 1ab4202674..627219cc4b 100644 --- a/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py +++ b/api/migrations/versions/2025_10_22_1611-03f8dcbc611e_add_workflowpause_model.py @@ -9,7 +9,10 @@ Create Date: 2025-10-22 16:11:31.805407 from alembic import op import models as models import sqlalchemy as sa +from libs.uuid_utils import uuidv7 +def _is_pg(conn): + return conn.dialect.name == "postgresql" # revision identifiers, used by Alembic. revision = "03f8dcbc611e" @@ -19,19 +22,33 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "workflow_pauses", - sa.Column("workflow_id", models.types.StringUUID(), nullable=False), - sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False), - sa.Column("resumed_at", sa.DateTime(), nullable=True), - sa.Column("state_object_key", sa.String(length=255), nullable=False), - sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False), - sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), - sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")), - sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")), - ) - + conn = op.get_bind() + if _is_pg(conn): + op.create_table( + "workflow_pauses", + sa.Column("workflow_id", models.types.StringUUID(), nullable=False), + sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False), + sa.Column("resumed_at", sa.DateTime(), nullable=True), + sa.Column("state_object_key", sa.String(length=255), nullable=False), + sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False), + sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")), + sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")), + ) + else: + op.create_table( + "workflow_pauses", + sa.Column("workflow_id", models.types.StringUUID(), nullable=False), + sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False), + 
sa.Column("resumed_at", sa.DateTime(), nullable=True), + sa.Column("state_object_key", sa.String(length=255), nullable=False), + sa.Column("id", models.types.StringUUID(), nullable=False), + sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")), + sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")), + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py b/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py index c03d64b234..9641a15c89 100644 --- a/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py +++ b/api/migrations/versions/2025_10_30_1518-669ffd70119c_introduce_trigger.py @@ -8,9 +8,12 @@ Create Date: 2025-10-30 15:18:49.549156 from alembic import op import models as models import sqlalchemy as sa +from libs.uuid_utils import uuidv7 from models.enums import AppTriggerStatus, AppTriggerType +def _is_pg(conn): + return conn.dialect.name == "postgresql" # revision identifiers, used by Alembic. revision = '669ffd70119c' @@ -21,125 +24,246 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('app_triggers', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('node_id', sa.String(length=64), nullable=False), - sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True), - sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id', name='app_trigger_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('app_triggers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True), + sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_trigger_pkey') + ) + else: + op.create_table('app_triggers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + 
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True), + sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_trigger_pkey') + ) with op.batch_alter_table('app_triggers', schema=None) as batch_op: batch_op.create_index('app_trigger_tenant_app_idx', ['tenant_id', 'app_id'], unique=False) - op.create_table('trigger_oauth_system_clients', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('plugin_id', sa.String(length=512), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'), - sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx') - ) - op.create_table('trigger_oauth_tenant_clients', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('plugin_id', sa.String(length=512), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'), - sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client') - ) - op.create_table('trigger_subscriptions', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('user_id', models.types.StringUUID(), nullable=False), - sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'), - sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'), - sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'), - sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'), - sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'), - sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'), - sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'), - sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription 
instance expiration timestamp, -1 for never'), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'), - sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider') - ) + if _is_pg(conn): + op.create_table('trigger_oauth_system_clients', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx') + ) + else: + op.create_table('trigger_oauth_system_clients', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=512), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'), + sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx') + ) + if _is_pg(conn): + op.create_table('trigger_oauth_tenant_clients', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('encrypted_oauth_params', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client') + ) + else: + op.create_table('trigger_oauth_tenant_clients', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('plugin_id', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', 
name='trigger_oauth_tenant_client_pkey'), + sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client') + ) + if _is_pg(conn): + op.create_table('trigger_subscriptions', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'), + sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'), + sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'), + sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'), + sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'), + sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'), + sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'), + sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider') + ) + else: + op.create_table('trigger_subscriptions', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'), + sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'), + sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'), + sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'), + sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'), + sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'), + sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'), + sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider') + ) with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op: batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True) 
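# The two-branch guard above repeats for every table in this PR. A minimal
# sketch of the shared shape, assuming only the Alembic runtime context
# ('example' is a placeholder table, not part of this patch):
#
#     conn = op.get_bind()
#     if _is_pg(conn):
#         # PostgreSQL keeps SQL-side id defaults such as uuidv7() / uuid_generate_v4().
#         op.create_table('example',
#             sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
#             sa.Column('payload', sa.Text(), nullable=False),
#             sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False))
#     else:
#         # MySQL has no uuidv7()/uuid_generate_v4() function, so ids are generated
#         # client-side; LongText stands in for unbounded TEXT, and
#         # sa.func.current_timestamp() renders a portable timestamp default.
#         op.create_table('example',
#             sa.Column('id', models.types.StringUUID(), nullable=False),
#             sa.Column('payload', models.types.LongText(), nullable=False),
#             sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False))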
batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False) batch_op.create_index('idx_trigger_providers_tenant_provider', ['tenant_id', 'provider_id'], unique=False) - op.create_table('workflow_plugin_triggers', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('node_id', sa.String(length=64), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('provider_id', sa.String(length=512), nullable=False), - sa.Column('event_name', sa.String(length=255), nullable=False), - sa.Column('subscription_id', sa.String(length=255), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'), - sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription') - ) + if _is_pg(conn): + op.create_table('workflow_plugin_triggers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_id', sa.String(length=512), nullable=False), + sa.Column('event_name', sa.String(length=255), nullable=False), + sa.Column('subscription_id', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription') + ) + else: + op.create_table('workflow_plugin_triggers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_id', sa.String(length=512), nullable=False), + sa.Column('event_name', sa.String(length=255), nullable=False), + sa.Column('subscription_id', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription') + ) with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op: batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'event_name'], unique=False) - op.create_table('workflow_schedule_plans', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('node_id', sa.String(length=64), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('cron_expression', sa.String(length=255), 
nullable=False), - sa.Column('timezone', sa.String(length=64), nullable=False), - sa.Column('next_run_at', sa.DateTime(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'), - sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node') - ) + if _is_pg(conn): + op.create_table('workflow_schedule_plans', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('cron_expression', sa.String(length=255), nullable=False), + sa.Column('timezone', sa.String(length=64), nullable=False), + sa.Column('next_run_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node') + ) + else: + op.create_table('workflow_schedule_plans', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('cron_expression', sa.String(length=255), nullable=False), + sa.Column('timezone', sa.String(length=64), nullable=False), + sa.Column('next_run_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node') + ) with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op: batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False) - op.create_table('workflow_trigger_logs', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('workflow_id', models.types.StringUUID(), nullable=False), - sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True), - sa.Column('root_node_id', sa.String(length=255), nullable=True), - sa.Column('trigger_metadata', sa.Text(), nullable=False), - sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False), - sa.Column('trigger_data', sa.Text(), nullable=False), - sa.Column('inputs', sa.Text(), nullable=False), - sa.Column('outputs', sa.Text(), nullable=True), - sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False), - sa.Column('error', sa.Text(), nullable=True), - sa.Column('queue_name', sa.String(length=100), nullable=False), - sa.Column('celery_task_id', sa.String(length=255), nullable=True), - sa.Column('retry_count', sa.Integer(), nullable=False), - sa.Column('elapsed_time', 
sa.Float(), nullable=True), - sa.Column('total_tokens', sa.Integer(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('created_by_role', sa.String(length=255), nullable=False), - sa.Column('created_by', sa.String(length=255), nullable=False), - sa.Column('triggered_at', sa.DateTime(), nullable=True), - sa.Column('finished_at', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey') - ) + if _is_pg(conn): + op.create_table('workflow_trigger_logs', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True), + sa.Column('root_node_id', sa.String(length=255), nullable=True), + sa.Column('trigger_metadata', sa.Text(), nullable=False), + sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False), + sa.Column('trigger_data', sa.Text(), nullable=False), + sa.Column('inputs', sa.Text(), nullable=False), + sa.Column('outputs', sa.Text(), nullable=True), + sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False), + sa.Column('error', sa.Text(), nullable=True), + sa.Column('queue_name', sa.String(length=100), nullable=False), + sa.Column('celery_task_id', sa.String(length=255), nullable=True), + sa.Column('retry_count', sa.Integer(), nullable=False), + sa.Column('elapsed_time', sa.Float(), nullable=True), + sa.Column('total_tokens', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', sa.String(length=255), nullable=False), + sa.Column('triggered_at', sa.DateTime(), nullable=True), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey') + ) + else: + op.create_table('workflow_trigger_logs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True), + sa.Column('root_node_id', sa.String(length=255), nullable=True), + sa.Column('trigger_metadata', models.types.LongText(), nullable=False), + sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False), + sa.Column('trigger_data', models.types.LongText(), nullable=False), + sa.Column('inputs', models.types.LongText(), nullable=False), + sa.Column('outputs', models.types.LongText(), nullable=True), + sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False), + sa.Column('error', models.types.LongText(), nullable=True), + sa.Column('queue_name', sa.String(length=100), nullable=False), + sa.Column('celery_task_id', sa.String(length=255), nullable=True), + sa.Column('retry_count', sa.Integer(), nullable=False), + sa.Column('elapsed_time', sa.Float(), nullable=True), + sa.Column('total_tokens', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), 
server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', sa.String(length=255), nullable=False), + sa.Column('triggered_at', sa.DateTime(), nullable=True), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey') + ) with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op: batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False) batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False) @@ -147,19 +271,34 @@ def upgrade(): batch_op.create_index('workflow_trigger_log_workflow_id_idx', ['workflow_id'], unique=False) batch_op.create_index('workflow_trigger_log_workflow_run_idx', ['workflow_run_id'], unique=False) - op.create_table('workflow_webhook_triggers', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('node_id', sa.String(length=64), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('webhook_id', sa.String(length=24), nullable=False), - sa.Column('created_by', models.types.StringUUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'), - sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'), - sa.UniqueConstraint('webhook_id', name='uniq_webhook_id') - ) + if _is_pg(conn): + op.create_table('workflow_webhook_triggers', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('webhook_id', sa.String(length=24), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'), + sa.UniqueConstraint('webhook_id', name='uniq_webhook_id') + ) + else: + op.create_table('workflow_webhook_triggers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('node_id', sa.String(length=64), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('webhook_id', sa.String(length=24), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'), + sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'), + sa.UniqueConstraint('webhook_id', name='uniq_webhook_id') + ) with op.batch_alter_table('workflow_webhook_triggers', 
schema=None) as batch_op: batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False) @@ -184,8 +323,14 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True)) + else: + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'"), autoincrement=False, nullable=True)) with op.batch_alter_table('celery_tasksetmeta', schema=None) as batch_op: batch_op.alter_column('taskset_id', diff --git a/api/migrations/versions/2025_11_15_2102-09cfdda155d1_mysql_adaptation.py b/api/migrations/versions/2025_11_15_2102-09cfdda155d1_mysql_adaptation.py new file mode 100644 index 0000000000..a3f6c3cb19 --- /dev/null +++ b/api/migrations/versions/2025_11_15_2102-09cfdda155d1_mysql_adaptation.py @@ -0,0 +1,131 @@ +"""mysql adaptation + +Revision ID: 09cfdda155d1 +Revises: 669ffd70119c +Create Date: 2025-11-15 21:02:32.472885 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql, mysql + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + +# revision identifiers, used by Alembic. +revision = '09cfdda155d1' +down_revision = '669ffd70119c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust!
### + conn = op.get_bind() + if _is_pg(conn): + with op.batch_alter_table('datasource_providers', schema=None) as batch_op: + batch_op.alter_column('provider', + existing_type=sa.VARCHAR(length=255), + type_=sa.String(length=128), + existing_nullable=False) + + with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op: + batch_op.alter_column('external_knowledge_id', + existing_type=sa.TEXT(), + type_=sa.String(length=512), + existing_nullable=False) + + with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op: + batch_op.alter_column('exclude_plugins', + existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)), + type_=sa.JSON(), + existing_nullable=False, + postgresql_using='to_jsonb(exclude_plugins)::json') + + batch_op.alter_column('include_plugins', + existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)), + type_=sa.JSON(), + existing_nullable=False, + postgresql_using='to_jsonb(include_plugins)::json') + + with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op: + batch_op.alter_column('plugin_id', + existing_type=sa.VARCHAR(length=512), + type_=sa.String(length=255), + existing_nullable=False) + + with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op: + batch_op.alter_column('plugin_id', + existing_type=sa.VARCHAR(length=512), + type_=sa.String(length=255), + existing_nullable=False) + else: + with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op: + batch_op.alter_column('plugin_id', + existing_type=mysql.VARCHAR(length=512), + type_=sa.String(length=255), + existing_nullable=False) + + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.alter_column('updated_at', + existing_type=mysql.TIMESTAMP(), + type_=sa.DateTime(), + existing_nullable=False) + + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + conn = op.get_bind() + if _is_pg(conn): + with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op: + batch_op.alter_column('plugin_id', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=512), + existing_nullable=False) + + with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op: + batch_op.alter_column('plugin_id', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=512), + existing_nullable=False) + + with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op: + batch_op.alter_column('include_plugins', + existing_type=sa.JSON(), + type_=postgresql.ARRAY(sa.VARCHAR(length=255)),  # reversing JSON to an array likely needs a postgresql_using cast + existing_nullable=False) + batch_op.alter_column('exclude_plugins', + existing_type=sa.JSON(), + type_=postgresql.ARRAY(sa.VARCHAR(length=255)),  # same caveat as include_plugins + existing_nullable=False) + + with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op: + batch_op.alter_column('external_knowledge_id', + existing_type=sa.String(length=512), + type_=sa.TEXT(), + existing_nullable=False) + + with op.batch_alter_table('datasource_providers', schema=None) as batch_op: + batch_op.alter_column('provider', + existing_type=sa.String(length=128), + type_=sa.VARCHAR(length=255), + existing_nullable=False) + + else: + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.alter_column('updated_at', + existing_type=sa.DateTime(), + type_=mysql.TIMESTAMP(), + existing_nullable=False) + + with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op: + batch_op.alter_column('plugin_id', + existing_type=sa.String(length=255), + type_=mysql.VARCHAR(length=512), + existing_nullable=False) + + # ### end Alembic commands ### diff --git a/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py b/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py index f3eef4681e..fae506906b 100644 --- a/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py +++ b/api/migrations/versions/23db93619b9d_add_message_files_into_agent_thought.py @@ -8,6 +8,12 @@ Create Date: 2024-01-18 08:46:37.302657 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '23db93619b9d' down_revision = '8ae9bc661daa' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust!
### - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py b/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py index 9816e92dd1..2676ef0b94 100644 --- a/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py +++ b/api/migrations/versions/246ba09cbbdb_add_app_anntation_setting.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '246ba09cbbdb' down_revision = '714aafe25d39' @@ -18,17 +24,33 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('app_annotation_settings', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False), - sa.Column('collection_binding_id', postgresql.UUID(), nullable=False), - sa.Column('created_user_id', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_user_id', postgresql.UUID(), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('app_annotation_settings', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('collection_binding_id', postgresql.UUID(), nullable=False), + sa.Column('created_user_id', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_user_id', postgresql.UUID(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey') + ) + else: + op.create_table('app_annotation_settings', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('collection_binding_id', models.types.StringUUID(), nullable=False), + sa.Column('created_user_id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_user_id', models.types.StringUUID(), nullable=False), + sa.Column('updated_at', 
sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey') + ) + with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op: batch_op.create_index('app_annotation_settings_app_idx', ['app_id'], unique=False) @@ -40,8 +62,14 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True)) with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op: batch_op.drop_index('app_annotation_settings_app_idx') diff --git a/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py b/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py index 99b7010612..3362a3a09f 100644 --- a/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py +++ b/api/migrations/versions/2a3aebbbf4bb_add_app_tracing.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '2a3aebbbf4bb' down_revision = 'c031d46af369' @@ -19,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('apps', schema=None) as batch_op: - batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py b/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py index b06a3530b8..40bd727f66 100644 --- a/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py +++ b/api/migrations/versions/2e9819ca5b28_add_tenant_id_in_api_token.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '2e9819ca5b28' down_revision = 'ab23c11305d4' @@ -18,19 +24,35 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
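# `models.types.LongText` is referenced throughout these MySQL branches but
# not defined in this patch excerpt. A type with the behaviour the migrations
# rely on (LONGTEXT on MySQL, plain TEXT elsewhere) can be sketched as a
# TypeDecorator; this is an assumption about the helper, not necessarily
# Dify's actual implementation:
import sqlalchemy as sa
from sqlalchemy.dialects.mysql import LONGTEXT

class LongText(sa.types.TypeDecorator):
    impl = sa.Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        # LONGTEXT avoids MySQL TEXT's 64 KB cap for large serialized blobs.
        if dialect.name == "mysql":
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(sa.Text())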
### - with op.batch_alter_table('api_tokens', schema=None) as batch_op: - batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True)) - batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False) - batch_op.drop_column('dataset_id') + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('api_tokens', schema=None) as batch_op: + batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True)) + batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False) + batch_op.drop_column('dataset_id') + else: + with op.batch_alter_table('api_tokens', schema=None) as batch_op: + batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True)) + batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False) + batch_op.drop_column('dataset_id') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('api_tokens', schema=None) as batch_op: - batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True)) - batch_op.drop_index('api_token_tenant_idx') - batch_op.drop_column('tenant_id') + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('api_tokens', schema=None) as batch_op: + batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True)) + batch_op.drop_index('api_token_tenant_idx') + batch_op.drop_column('tenant_id') + else: + with op.batch_alter_table('api_tokens', schema=None) as batch_op: + batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True)) + batch_op.drop_index('api_token_tenant_idx') + batch_op.drop_column('tenant_id') # ### end Alembic commands ### diff --git a/api/migrations/versions/380c6aa5a70d_add_tool_labels_to_agent_thought.py b/api/migrations/versions/380c6aa5a70d_add_tool_labels_to_agent_thought.py index 6c13818463..42e403f8d1 100644 --- a/api/migrations/versions/380c6aa5a70d_add_tool_labels_to_agent_thought.py +++ b/api/migrations/versions/380c6aa5a70d_add_tool_labels_to_agent_thought.py @@ -8,6 +8,12 @@ Create Date: 2024-01-24 10:58:15.644445 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '380c6aa5a70d' down_revision = 'dfb3b7f477da' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
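# Likewise, `models.types.StringUUID` (used for the MySQL `tenant_id` above)
# is not shown in this excerpt. A plausible sketch, storing the canonical
# 36-character string form on MySQL while keeping native UUID on PostgreSQL
# (again an assumption about the helper, not necessarily Dify's code):
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

class StringUUID(sa.types.TypeDecorator):
    impl = sa.CHAR
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(postgresql.UUID())
        return dialect.type_descriptor(sa.CHAR(36))

    def process_result_value(self, value, dialect):
        # Normalize to str so callers see the same value on both backends.
        return None if value is None else str(value)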
### - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False)) + else: + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_labels_str', models.types.LongText(), default=sa.text("'{}'"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/3b18fea55204_add_tool_label_bings.py b/api/migrations/versions/3b18fea55204_add_tool_label_bings.py index bf54c247ea..ffba6c9f36 100644 --- a/api/migrations/versions/3b18fea55204_add_tool_label_bings.py +++ b/api/migrations/versions/3b18fea55204_add_tool_label_bings.py @@ -10,6 +10,10 @@ from alembic import op import models.types + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '3b18fea55204' down_revision = '7bdef072e63a' @@ -19,13 +23,24 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('tool_label_bindings', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tool_id', sa.String(length=64), nullable=False), - sa.Column('tool_type', sa.String(length=40), nullable=False), - sa.Column('label_name', sa.String(length=40), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tool_label_bindings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tool_id', sa.String(length=64), nullable=False), + sa.Column('tool_type', sa.String(length=40), nullable=False), + sa.Column('label_name', sa.String(length=40), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey') + ) + else: + op.create_table('tool_label_bindings', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tool_id', sa.String(length=64), nullable=False), + sa.Column('tool_type', sa.String(length=40), nullable=False), + sa.Column('label_name', sa.String(length=40), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey') + ) with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op: batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), server_default='', nullable=True)) diff --git a/api/migrations/versions/3c7cac9521c6_add_tags_and_binding_table.py b/api/migrations/versions/3c7cac9521c6_add_tags_and_binding_table.py index 5f11880683..6b2263b0b7 100644 --- a/api/migrations/versions/3c7cac9521c6_add_tags_and_binding_table.py +++ b/api/migrations/versions/3c7cac9521c6_add_tags_and_binding_table.py @@ -6,9 +6,15 @@ Create Date: 2024-04-11 06:17:34.278594 """ import sqlalchemy as sa -from alembic import op +from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '3c7cac9521c6' down_revision = 'c3311b089690' @@ -18,28 +24,54 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
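# Why the MySQL branch above switches from `server_default` to `default=`:
# MySQL (before 8.0.13) rejects literal DEFAULT values on TEXT columns
# ("BLOB, TEXT, GEOMETRY or JSON column can't have a default value").
# `default=` is a client-side SQLAlchemy default and emits no DDL, so the
# NOT NULL ADD COLUMN backfills existing rows with MySQL's implicit '' rather
# than '{}'. If parity with the PostgreSQL branch matters, an explicit
# backfill along these lines (hypothetical, not part of this patch) would
# close the gap:
from alembic import op

def _backfill_tool_labels_default() -> None:
    op.execute(
        "UPDATE message_agent_thoughts SET tool_labels_str = '{}' "
        "WHERE tool_labels_str = ''"
    )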
### - op.create_table('tag_bindings', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=True), - sa.Column('tag_id', postgresql.UUID(), nullable=True), - sa.Column('target_id', postgresql.UUID(), nullable=True), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tag_binding_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tag_bindings', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=True), + sa.Column('tag_id', postgresql.UUID(), nullable=True), + sa.Column('target_id', postgresql.UUID(), nullable=True), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tag_binding_pkey') + ) + else: + op.create_table('tag_bindings', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('tag_id', models.types.StringUUID(), nullable=True), + sa.Column('target_id', models.types.StringUUID(), nullable=True), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tag_binding_pkey') + ) + with op.batch_alter_table('tag_bindings', schema=None) as batch_op: batch_op.create_index('tag_bind_tag_id_idx', ['tag_id'], unique=False) batch_op.create_index('tag_bind_target_id_idx', ['target_id'], unique=False) - op.create_table('tags', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=True), - sa.Column('type', sa.String(length=16), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tag_pkey') - ) + if _is_pg(conn): + op.create_table('tags', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=True), + sa.Column('type', sa.String(length=16), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tag_pkey') + ) + else: + op.create_table('tags', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('type', sa.String(length=16), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tag_pkey') + ) + with op.batch_alter_table('tags', schema=None) as batch_op: 
batch_op.create_index('tag_name_idx', ['name'], unique=False) batch_op.create_index('tag_type_idx', ['type'], unique=False) diff --git a/api/migrations/versions/3ef9b2b6bee6_add_assistant_app.py b/api/migrations/versions/3ef9b2b6bee6_add_assistant_app.py index 4fbc570303..553d1d8743 100644 --- a/api/migrations/versions/3ef9b2b6bee6_add_assistant_app.py +++ b/api/migrations/versions/3ef9b2b6bee6_add_assistant_app.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '3ef9b2b6bee6' down_revision = '89c7899ca936' @@ -18,44 +24,96 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('tool_api_providers', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('name', sa.String(length=40), nullable=False), - sa.Column('schema', sa.Text(), nullable=False), - sa.Column('schema_type_str', sa.String(length=40), nullable=False), - sa.Column('user_id', postgresql.UUID(), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('description_str', sa.Text(), nullable=False), - sa.Column('tools_str', sa.Text(), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey') - ) - op.create_table('tool_builtin_providers', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=True), - sa.Column('user_id', postgresql.UUID(), nullable=False), - sa.Column('provider', sa.String(length=40), nullable=False), - sa.Column('encrypted_credentials', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'), - sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider') - ) - op.create_table('tool_published_apps', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('user_id', postgresql.UUID(), nullable=False), - sa.Column('description', sa.Text(), nullable=False), - sa.Column('llm_description', sa.Text(), nullable=False), - sa.Column('query_description', sa.Text(), nullable=False), - sa.Column('query_name', sa.String(length=40), nullable=False), - sa.Column('tool_name', sa.String(length=40), nullable=False), - sa.Column('author', sa.String(length=40), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ), - sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'), - sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool') - ) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + op.create_table('tool_api_providers', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('name', sa.String(length=40), nullable=False), + sa.Column('schema', sa.Text(), 
nullable=False), + sa.Column('schema_type_str', sa.String(length=40), nullable=False), + sa.Column('user_id', postgresql.UUID(), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('description_str', sa.Text(), nullable=False), + sa.Column('tools_str', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey') + ) + else: + # MySQL: Use compatible syntax + op.create_table('tool_api_providers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=40), nullable=False), + sa.Column('schema', models.types.LongText(), nullable=False), + sa.Column('schema_type_str', sa.String(length=40), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('description_str', models.types.LongText(), nullable=False), + sa.Column('tools_str', models.types.LongText(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey') + ) + if _is_pg(conn): + # PostgreSQL: Keep original syntax + op.create_table('tool_builtin_providers', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=True), + sa.Column('user_id', postgresql.UUID(), nullable=False), + sa.Column('provider', sa.String(length=40), nullable=False), + sa.Column('encrypted_credentials', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider') + ) + else: + # MySQL: Use compatible syntax + op.create_table('tool_builtin_providers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('provider', sa.String(length=40), nullable=False), + sa.Column('encrypted_credentials', models.types.LongText(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider') + ) + if _is_pg(conn): + # PostgreSQL: Keep original syntax + op.create_table('tool_published_apps', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('user_id', postgresql.UUID(), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('llm_description', sa.Text(), nullable=False), + sa.Column('query_description', sa.Text(), nullable=False), + sa.Column('query_name', sa.String(length=40), nullable=False), + sa.Column('tool_name', sa.String(length=40), nullable=False), + sa.Column('author', sa.String(length=40), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + 
sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ), + sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'), + sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool') + ) + else: + # MySQL: Use compatible syntax + op.create_table('tool_published_apps', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('description', models.types.LongText(), nullable=False), + sa.Column('llm_description', models.types.LongText(), nullable=False), + sa.Column('query_description', models.types.LongText(), nullable=False), + sa.Column('query_name', sa.String(length=40), nullable=False), + sa.Column('tool_name', sa.String(length=40), nullable=False), + sa.Column('author', sa.String(length=40), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ), + sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'), + sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py b/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py index f388b99b90..76056a9460 100644 --- a/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py +++ b/api/migrations/versions/42e85ed5564d_conversation_columns_set_nullable.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '42e85ed5564d' down_revision = 'f9107f83abab' @@ -18,31 +24,59 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('conversations', schema=None) as batch_op: - batch_op.alter_column('app_model_config_id', - existing_type=postgresql.UUID(), - nullable=True) - batch_op.alter_column('model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=True) - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('app_model_config_id', + existing_type=postgresql.UUID(), + nullable=True) + batch_op.alter_column('model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=True) + else: + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('app_model_config_id', + existing_type=models.types.StringUUID(), + nullable=True) + batch_op.alter_column('model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
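# The server_default swap repeated across these table definitions, side by
# side: `sa.text(...)` is passed through verbatim, and the PostgreSQL-style
# `CURRENT_TIMESTAMP(0)` may be rejected by MySQL for a plain DATETIME
# column, while `sa.func.current_timestamp()` renders as a bare
# CURRENT_TIMESTAMP that both backends accept (column objects below are
# illustrative only):
import sqlalchemy as sa

created_at_pg = sa.Column(
    "created_at", sa.DateTime(),
    server_default=sa.text("CURRENT_TIMESTAMP(0)"), nullable=False)
created_at_portable = sa.Column(
    "created_at", sa.DateTime(),
    server_default=sa.func.current_timestamp(), nullable=False)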
### - with op.batch_alter_table('conversations', schema=None) as batch_op: - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('app_model_config_id', - existing_type=postgresql.UUID(), - nullable=False) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('app_model_config_id', + existing_type=postgresql.UUID(), + nullable=False) + else: + with op.batch_alter_table('conversations', schema=None) as batch_op: + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('app_model_config_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/4823da1d26cf_add_tool_file.py b/api/migrations/versions/4823da1d26cf_add_tool_file.py index 1a473a10fe..9ef9c17a3a 100644 --- a/api/migrations/versions/4823da1d26cf_add_tool_file.py +++ b/api/migrations/versions/4823da1d26cf_add_tool_file.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '4823da1d26cf' down_revision = '053da0c1d756' @@ -18,16 +24,30 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('tool_files', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('user_id', postgresql.UUID(), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('conversation_id', postgresql.UUID(), nullable=False), - sa.Column('file_key', sa.String(length=255), nullable=False), - sa.Column('mimetype', sa.String(length=255), nullable=False), - sa.Column('original_url', sa.String(length=255), nullable=True), - sa.PrimaryKeyConstraint('id', name='tool_file_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tool_files', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('user_id', postgresql.UUID(), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('conversation_id', postgresql.UUID(), nullable=False), + sa.Column('file_key', sa.String(length=255), nullable=False), + sa.Column('mimetype', sa.String(length=255), nullable=False), + sa.Column('original_url', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name='tool_file_pkey') + ) + else: + op.create_table('tool_files', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('file_key', sa.String(length=255), nullable=False), + sa.Column('mimetype', sa.String(length=255), nullable=False), + sa.Column('original_url', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name='tool_file_pkey') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py b/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py index 2405021856..ef066587b7 100644 --- a/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py +++ b/api/migrations/versions/4829e54d2fee_change_message_chain_id_to_nullable.py @@ -8,6 +8,12 @@ Create Date: 2024-01-12 03:42:27.362415 from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '4829e54d2fee' down_revision = '114eed84c228' @@ -17,19 +23,39 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.alter_column('message_chain_id', - existing_type=postgresql.UUID(), - nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.alter_column('message_chain_id', + existing_type=postgresql.UUID(), + nullable=True) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.alter_column('message_chain_id', + existing_type=models.types.StringUUID(), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
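# Why `existing_type` is restated in the nullability-only changes in this
# file and in the conversations hunks above: PostgreSQL can toggle
# nullability in isolation, but MySQL's MODIFY COLUMN must repeat the full
# column definition, which Alembic reconstructs from the existing_* kwargs.
# Roughly (assuming StringUUID maps to CHAR(36) on MySQL):
#
#   PostgreSQL: ALTER TABLE message_agent_thoughts
#                 ALTER COLUMN message_chain_id DROP NOT NULL;
#   MySQL:      ALTER TABLE message_agent_thoughts
#                 MODIFY message_chain_id CHAR(36) NULL;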
### - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.alter_column('message_chain_id', - existing_type=postgresql.UUID(), - nullable=False) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.alter_column('message_chain_id', + existing_type=postgresql.UUID(), + nullable=False) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.alter_column('message_chain_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/4bcffcd64aa4_update_dataset_model_field_null_.py b/api/migrations/versions/4bcffcd64aa4_update_dataset_model_field_null_.py index 178bd24e3c..bee290e8dc 100644 --- a/api/migrations/versions/4bcffcd64aa4_update_dataset_model_field_null_.py +++ b/api/migrations/versions/4bcffcd64aa4_update_dataset_model_field_null_.py @@ -8,6 +8,10 @@ Create Date: 2023-08-28 20:58:50.077056 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '4bcffcd64aa4' down_revision = '853f9b9cd3b6' @@ -17,29 +21,55 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.alter_column('embedding_model', - existing_type=sa.VARCHAR(length=255), - nullable=True, - existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) - batch_op.alter_column('embedding_model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=True, - existing_server_default=sa.text("'openai'::character varying")) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.alter_column('embedding_model', + existing_type=sa.VARCHAR(length=255), + nullable=True, + existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + batch_op.alter_column('embedding_model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=True, + existing_server_default=sa.text("'openai'::character varying")) + else: + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.alter_column('embedding_model', + existing_type=sa.VARCHAR(length=255), + nullable=True, + existing_server_default=sa.text("'text-embedding-ada-002'")) + batch_op.alter_column('embedding_model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=True, + existing_server_default=sa.text("'openai'")) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
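# `'openai'::character varying` in the hunk above is PostgreSQL cast syntax
# embedded in the default; sa.text() passes it through verbatim, so the
# MySQL branch keeps the same default minus the cast:
#
#   existing_server_default=sa.text("'openai'::character varying")  # PostgreSQL
#   existing_server_default=sa.text("'openai'")                     # MySQL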
### - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.alter_column('embedding_model_provider', - existing_type=sa.VARCHAR(length=255), - nullable=False, - existing_server_default=sa.text("'openai'::character varying")) - batch_op.alter_column('embedding_model', - existing_type=sa.VARCHAR(length=255), - nullable=False, - existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.alter_column('embedding_model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=False, + existing_server_default=sa.text("'openai'::character varying")) + batch_op.alter_column('embedding_model', + existing_type=sa.VARCHAR(length=255), + nullable=False, + existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + else: + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.alter_column('embedding_model_provider', + existing_type=sa.VARCHAR(length=255), + nullable=False, + existing_server_default=sa.text("'openai'")) + batch_op.alter_column('embedding_model', + existing_type=sa.VARCHAR(length=255), + nullable=False, + existing_server_default=sa.text("'text-embedding-ada-002'")) # ### end Alembic commands ### diff --git a/api/migrations/versions/4e99a8df00ff_add_load_balancing.py b/api/migrations/versions/4e99a8df00ff_add_load_balancing.py index 3be4ba4f2a..a2ab39bb28 100644 --- a/api/migrations/versions/4e99a8df00ff_add_load_balancing.py +++ b/api/migrations/versions/4e99a8df00ff_add_load_balancing.py @@ -10,6 +10,10 @@ from alembic import op import models.types + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '4e99a8df00ff' down_revision = '64a70a7aab8b' @@ -19,34 +23,67 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
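# A pattern worth noting in the table creations that follow: the PostgreSQL
# branches keep `server_default=sa.text('uuid_generate_v4()')` (provided by
# the uuid-ossp extension that the init migration creates on PG only), while
# the MySQL branches drop the server default entirely, since MySQL allows
# expression defaults such as (uuid()) only from 8.0.13 on. Row ids are then
# expected from the application side; a sketch of that convention
# (hypothetical, mirroring what an ORM model default might do):
import uuid
import sqlalchemy as sa

id_col = sa.Column(
    "id", sa.CHAR(36),  # stand-in for models.types.StringUUID()
    primary_key=True, default=lambda: str(uuid.uuid4()))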
### - op.create_table('load_balancing_model_configs', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('provider_name', sa.String(length=255), nullable=False), - sa.Column('model_name', sa.String(length=255), nullable=False), - sa.Column('model_type', sa.String(length=40), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('encrypted_config', sa.Text(), nullable=True), - sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('load_balancing_model_configs', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', sa.Text(), nullable=True), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey') + ) + else: + op.create_table('load_balancing_model_configs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', models.types.LongText(), nullable=True), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey') + ) + with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op: batch_op.create_index('load_balancing_model_config_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False) - op.create_table('provider_model_settings', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.types.StringUUID(), nullable=False), - sa.Column('provider_name', sa.String(length=255), nullable=False), - sa.Column('model_name', sa.String(length=255), nullable=False), - sa.Column('model_type', sa.String(length=40), nullable=False), - sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.Column('load_balancing_enabled', 
sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey') - ) + if _is_pg(conn): + op.create_table('provider_model_settings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey') + ) + else: + op.create_table('provider_model_settings', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey') + ) + with op.batch_alter_table('provider_model_settings', schema=None) as batch_op: batch_op.create_index('provider_model_setting_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False) diff --git a/api/migrations/versions/5022897aaceb_add_model_name_in_embedding.py b/api/migrations/versions/5022897aaceb_add_model_name_in_embedding.py index c0f4af5a00..5e4bceaef1 100644 --- a/api/migrations/versions/5022897aaceb_add_model_name_in_embedding.py +++ b/api/migrations/versions/5022897aaceb_add_model_name_in_embedding.py @@ -8,6 +8,10 @@ Create Date: 2023-08-11 14:38:15.499460 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '5022897aaceb' down_revision = 'bf0aec5ba2cf' @@ -17,10 +21,20 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
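# The mechanism all of these migrations share, in one place: Alembic exposes
# the live connection via op.get_bind(), and its dialect name drives the
# branching. A minimal sketch of the pattern:
from alembic import op

def _is_pg(conn) -> bool:
    return conn.dialect.name == "postgresql"

def upgrade():  # sketch of the recurring shape, not a real migration
    conn = op.get_bind()
    if _is_pg(conn):
        ...  # PostgreSQL: keep the original DDL (UUID, TEXT, CURRENT_TIMESTAMP(0))
    else:
        ...  # MySQL: portable equivalents (StringUUID, LongText, func.current_timestamp())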
### - with op.batch_alter_table('embeddings', schema=None) as batch_op: - batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False)) - batch_op.drop_constraint('embedding_hash_idx', type_='unique') - batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash']) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False)) + batch_op.drop_constraint('embedding_hash_idx', type_='unique') + batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash']) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'"), nullable=False)) + batch_op.drop_constraint('embedding_hash_idx', type_='unique') + batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash']) # ### end Alembic commands ### diff --git a/api/migrations/versions/53bf8af60645_update_model.py b/api/migrations/versions/53bf8af60645_update_model.py index 3d0928d013..bb4af075c1 100644 --- a/api/migrations/versions/53bf8af60645_update_model.py +++ b/api/migrations/versions/53bf8af60645_update_model.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '53bf8af60645' down_revision = '8e5588e6412e' @@ -19,23 +23,43 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('embeddings', schema=None) as batch_op: - batch_op.alter_column('provider_name', - existing_type=sa.VARCHAR(length=40), - type_=sa.String(length=255), - existing_nullable=False, - existing_server_default=sa.text("''::character varying")) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('provider_name', + existing_type=sa.VARCHAR(length=40), + type_=sa.String(length=255), + existing_nullable=False, + existing_server_default=sa.text("''::character varying")) + else: + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('provider_name', + existing_type=sa.VARCHAR(length=40), + type_=sa.String(length=255), + existing_nullable=False, + existing_server_default=sa.text("''")) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
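# One more dialect difference hidden behind an unchanged call: MySQL models
# UNIQUE constraints as unique indexes, so the same
# `batch_op.drop_constraint('embedding_hash_idx', type_='unique')` is
# expected to emit (roughly; unverified against Alembic's exact output):
#
#   PostgreSQL: ALTER TABLE embeddings DROP CONSTRAINT embedding_hash_idx;
#   MySQL:      ALTER TABLE embeddings DROP INDEX embedding_hash_idx;
#
# which is why the `type_='unique'` hint matters on the MySQL side.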
### - with op.batch_alter_table('embeddings', schema=None) as batch_op: - batch_op.alter_column('provider_name', - existing_type=sa.String(length=255), - type_=sa.VARCHAR(length=40), - existing_nullable=False, - existing_server_default=sa.text("''::character varying")) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('provider_name', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=40), + existing_nullable=False, + existing_server_default=sa.text("''::character varying")) + else: + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('provider_name', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=40), + existing_nullable=False, + existing_server_default=sa.text("''")) # ### end Alembic commands ### diff --git a/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py b/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py index 299f442de9..b080e7680b 100644 --- a/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py +++ b/api/migrations/versions/563cf8bf777b_enable_tool_file_without_conversation_id.py @@ -8,6 +8,12 @@ Create Date: 2024-03-14 04:54:56.679506 from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '563cf8bf777b' down_revision = 'b5429b71023c' @@ -17,19 +23,35 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('tool_files', schema=None) as batch_op: - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tool_files', schema=None) as batch_op: + batch_op.alter_column('conversation_id', + existing_type=postgresql.UUID(), + nullable=True) + else: + with op.batch_alter_table('tool_files', schema=None) as batch_op: + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('tool_files', schema=None) as batch_op: - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=False) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tool_files', schema=None) as batch_op: + batch_op.alter_column('conversation_id', + existing_type=postgresql.UUID(), + nullable=False) + else: + with op.batch_alter_table('tool_files', schema=None) as batch_op: + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/614f77cecc48_add_last_active_at.py b/api/migrations/versions/614f77cecc48_add_last_active_at.py index 182f8f89f1..6d5c5bf61f 100644 --- a/api/migrations/versions/614f77cecc48_add_last_active_at.py +++ b/api/migrations/versions/614f77cecc48_add_last_active_at.py @@ -8,6 +8,10 @@ Create Date: 2023-06-15 13:33:00.357467 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. 
revision = '614f77cecc48' down_revision = 'a45f4dfde53b' @@ -17,8 +21,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('accounts', schema=None) as batch_op: - batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('accounts', schema=None) as batch_op: + batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) + else: + with op.batch_alter_table('accounts', schema=None) as batch_op: + batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/64b051264f32_init.py b/api/migrations/versions/64b051264f32_init.py index b0fb3deac6..ec0ae0fee2 100644 --- a/api/migrations/versions/64b051264f32_init.py +++ b/api/migrations/versions/64b051264f32_init.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '64b051264f32' down_revision = None @@ -18,263 +24,519 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";') + conn = op.get_bind() + + if _is_pg(conn): + op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";') + else: + pass - op.create_table('account_integrates', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('account_id', postgresql.UUID(), nullable=False), - sa.Column('provider', sa.String(length=16), nullable=False), - sa.Column('open_id', sa.String(length=255), nullable=False), - sa.Column('encrypted_token', sa.String(length=255), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='account_integrate_pkey'), - sa.UniqueConstraint('account_id', 'provider', name='unique_account_provider'), - sa.UniqueConstraint('provider', 'open_id', name='unique_provider_open_id') - ) - op.create_table('accounts', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('email', sa.String(length=255), nullable=False), - sa.Column('password', sa.String(length=255), nullable=True), - sa.Column('password_salt', sa.String(length=255), nullable=True), - sa.Column('avatar', sa.String(length=255), nullable=True), - sa.Column('interface_language', sa.String(length=255), nullable=True), - sa.Column('interface_theme', sa.String(length=255), nullable=True), - sa.Column('timezone', sa.String(length=255), nullable=True), - sa.Column('last_login_at', sa.DateTime(), nullable=True), - sa.Column('last_login_ip', sa.String(length=255), nullable=True), - sa.Column('status', sa.String(length=16), server_default=sa.text("'active'::character varying"), nullable=False), - sa.Column('initialized_at', sa.DateTime(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), 
nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='account_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('account_integrates',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('account_id', postgresql.UUID(), nullable=False),
+            sa.Column('provider', sa.String(length=16), nullable=False),
+            sa.Column('open_id', sa.String(length=255), nullable=False),
+            sa.Column('encrypted_token', sa.String(length=255), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='account_integrate_pkey'),
+            sa.UniqueConstraint('account_id', 'provider', name='unique_account_provider'),
+            sa.UniqueConstraint('provider', 'open_id', name='unique_provider_open_id')
+        )
+    else:
+        op.create_table('account_integrates',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('account_id', models.types.StringUUID(), nullable=False),
+            sa.Column('provider', sa.String(length=16), nullable=False),
+            sa.Column('open_id', sa.String(length=255), nullable=False),
+            sa.Column('encrypted_token', sa.String(length=255), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='account_integrate_pkey'),
+            sa.UniqueConstraint('account_id', 'provider', name='unique_account_provider'),
+            sa.UniqueConstraint('provider', 'open_id', name='unique_provider_open_id')
+        )
+    if _is_pg(conn):
+        op.create_table('accounts',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('email', sa.String(length=255), nullable=False),
+            sa.Column('password', sa.String(length=255), nullable=True),
+            sa.Column('password_salt', sa.String(length=255), nullable=True),
+            sa.Column('avatar', sa.String(length=255), nullable=True),
+            sa.Column('interface_language', sa.String(length=255), nullable=True),
+            sa.Column('interface_theme', sa.String(length=255), nullable=True),
+            sa.Column('timezone', sa.String(length=255), nullable=True),
+            sa.Column('last_login_at', sa.DateTime(), nullable=True),
+            sa.Column('last_login_ip', sa.String(length=255), nullable=True),
+            sa.Column('status', sa.String(length=16), server_default=sa.text("'active'::character varying"), nullable=False),
+            sa.Column('initialized_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='account_pkey')
+        )
+    else:
+        op.create_table('accounts',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('email', sa.String(length=255), nullable=False),
+            sa.Column('password', sa.String(length=255), nullable=True),
+            sa.Column('password_salt', sa.String(length=255), nullable=True),
+            sa.Column('avatar', sa.String(length=255), nullable=True),
+            sa.Column('interface_language', sa.String(length=255), nullable=True),
+            sa.Column('interface_theme', sa.String(length=255), nullable=True),
+            sa.Column('timezone', sa.String(length=255), nullable=True),
+            sa.Column('last_login_at', sa.DateTime(), nullable=True),
+            sa.Column('last_login_ip', sa.String(length=255), nullable=True),
+            sa.Column('status', sa.String(length=16), server_default=sa.text("'active'"), nullable=False),
+            sa.Column('initialized_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='account_pkey')
+        )
     with op.batch_alter_table('accounts', schema=None) as batch_op:
         batch_op.create_index('account_email_idx', ['email'], unique=False)
-    op.create_table('api_requests',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('api_token_id', postgresql.UUID(), nullable=False),
-    sa.Column('path', sa.String(length=255), nullable=False),
-    sa.Column('request', sa.Text(), nullable=True),
-    sa.Column('response', sa.Text(), nullable=True),
-    sa.Column('ip', sa.String(length=255), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='api_request_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('api_requests',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('api_token_id', postgresql.UUID(), nullable=False),
+            sa.Column('path', sa.String(length=255), nullable=False),
+            sa.Column('request', sa.Text(), nullable=True),
+            sa.Column('response', sa.Text(), nullable=True),
+            sa.Column('ip', sa.String(length=255), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='api_request_pkey')
+        )
+    else:
+        op.create_table('api_requests',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('api_token_id', models.types.StringUUID(), nullable=False),
+            sa.Column('path', sa.String(length=255), nullable=False),
+            sa.Column('request', models.types.LongText(), nullable=True),
+            sa.Column('response', models.types.LongText(), nullable=True),
+            sa.Column('ip', sa.String(length=255), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='api_request_pkey')
+        )
     with op.batch_alter_table('api_requests', schema=None) as batch_op:
         batch_op.create_index('api_request_token_idx', ['tenant_id', 'api_token_id'], unique=False)
-    op.create_table('api_tokens',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('app_id', postgresql.UUID(), nullable=True),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=True),
-    sa.Column('type', sa.String(length=16), nullable=False),
-    sa.Column('token', sa.String(length=255), nullable=False),
-    sa.Column('last_used_at', sa.DateTime(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='api_token_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('api_tokens',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('app_id', postgresql.UUID(), nullable=True),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=True),
+            sa.Column('type', sa.String(length=16), nullable=False),
+            sa.Column('token', sa.String(length=255), nullable=False),
+            sa.Column('last_used_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='api_token_pkey')
+        )
+    else:
+        op.create_table('api_tokens',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=True),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=True),
+            sa.Column('type', sa.String(length=16), nullable=False),
+            sa.Column('token', sa.String(length=255), nullable=False),
+            sa.Column('last_used_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='api_token_pkey')
+        )
     with op.batch_alter_table('api_tokens', schema=None) as batch_op:
         batch_op.create_index('api_token_app_id_type_idx', ['app_id', 'type'], unique=False)
         batch_op.create_index('api_token_token_idx', ['token', 'type'], unique=False)
-    op.create_table('app_dataset_joins',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('app_id', postgresql.UUID(), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='app_dataset_join_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('app_dataset_joins',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('app_id', postgresql.UUID(), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='app_dataset_join_pkey')
+        )
+    else:
+        op.create_table('app_dataset_joins',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='app_dataset_join_pkey')
+        )
     with op.batch_alter_table('app_dataset_joins', schema=None) as batch_op:
         batch_op.create_index('app_dataset_join_app_dataset_idx', ['dataset_id', 'app_id'], unique=False)
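[Editorial note] Every table in this migration is now created twice, gated on `_is_pg(conn)`. The helper itself is defined outside this hunk, so the following is only a minimal sketch consistent with how the hunk calls it (the patch's actual definition may differ): it distinguishes the bound dialect, which Alembic exposes via `op.get_bind()`.

    from alembic import op
    from sqlalchemy.engine import Connection

    def _is_pg(conn: Connection) -> bool:
        """True when the migration is running against PostgreSQL."""
        # SQLAlchemy reports "postgresql" or "mysql" as the dialect name.
        return conn.dialect.name == "postgresql"

    def upgrade():
        conn = op.get_bind()  # the connection Alembic is migrating over
        if _is_pg(conn):
            ...  # PostgreSQL DDL: uuid_generate_v4(), sequences, ::casts
        else:
            ...  # MySQL DDL: client-side UUIDs, AUTO_INCREMENT, plain defaults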
-    op.create_table('app_model_configs',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('app_id', postgresql.UUID(), nullable=False),
-    sa.Column('provider', sa.String(length=255), nullable=False),
-    sa.Column('model_id', sa.String(length=255), nullable=False),
-    sa.Column('configs', sa.JSON(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('opening_statement', sa.Text(), nullable=True),
-    sa.Column('suggested_questions', sa.Text(), nullable=True),
-    sa.Column('suggested_questions_after_answer', sa.Text(), nullable=True),
-    sa.Column('more_like_this', sa.Text(), nullable=True),
-    sa.Column('model', sa.Text(), nullable=True),
-    sa.Column('user_input_form', sa.Text(), nullable=True),
-    sa.Column('pre_prompt', sa.Text(), nullable=True),
-    sa.Column('agent_mode', sa.Text(), nullable=True),
-    sa.PrimaryKeyConstraint('id', name='app_model_config_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('app_model_configs',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('app_id', postgresql.UUID(), nullable=False),
+            sa.Column('provider', sa.String(length=255), nullable=False),
+            sa.Column('model_id', sa.String(length=255), nullable=False),
+            sa.Column('configs', sa.JSON(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('opening_statement', sa.Text(), nullable=True),
+            sa.Column('suggested_questions', sa.Text(), nullable=True),
+            sa.Column('suggested_questions_after_answer', sa.Text(), nullable=True),
+            sa.Column('more_like_this', sa.Text(), nullable=True),
+            sa.Column('model', sa.Text(), nullable=True),
+            sa.Column('user_input_form', sa.Text(), nullable=True),
+            sa.Column('pre_prompt', sa.Text(), nullable=True),
+            sa.Column('agent_mode', sa.Text(), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='app_model_config_pkey')
+        )
+    else:
+        op.create_table('app_model_configs',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=False),
+            sa.Column('provider', sa.String(length=255), nullable=False),
+            sa.Column('model_id', sa.String(length=255), nullable=False),
+            sa.Column('configs', sa.JSON(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('opening_statement', models.types.LongText(), nullable=True),
+            sa.Column('suggested_questions', models.types.LongText(), nullable=True),
+            sa.Column('suggested_questions_after_answer', models.types.LongText(), nullable=True),
+            sa.Column('more_like_this', models.types.LongText(), nullable=True),
+            sa.Column('model', models.types.LongText(), nullable=True),
+            sa.Column('user_input_form', models.types.LongText(), nullable=True),
+            sa.Column('pre_prompt', models.types.LongText(), nullable=True),
+            sa.Column('agent_mode', models.types.LongText(), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='app_model_config_pkey')
+        )
     with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
         batch_op.create_index('app_app_id_idx', ['app_id'], unique=False)
-    op.create_table('apps',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=False),
-    sa.Column('mode', sa.String(length=255), nullable=False),
-    sa.Column('icon', sa.String(length=255), nullable=True),
-    sa.Column('icon_background', sa.String(length=255), nullable=True),
-    sa.Column('app_model_config_id', postgresql.UUID(), nullable=True),
-    sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
-    sa.Column('enable_site', sa.Boolean(), nullable=False),
-    sa.Column('enable_api', sa.Boolean(), nullable=False),
-    sa.Column('api_rpm', sa.Integer(), nullable=False),
-    sa.Column('api_rph', sa.Integer(), nullable=False),
-    sa.Column('is_demo', sa.Boolean(), server_default=sa.text('false'), nullable=False),
-    sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='app_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('apps',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('mode', sa.String(length=255), nullable=False),
+            sa.Column('icon', sa.String(length=255), nullable=True),
+            sa.Column('icon_background', sa.String(length=255), nullable=True),
+            sa.Column('app_model_config_id', postgresql.UUID(), nullable=True),
+            sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
+            sa.Column('enable_site', sa.Boolean(), nullable=False),
+            sa.Column('enable_api', sa.Boolean(), nullable=False),
+            sa.Column('api_rpm', sa.Integer(), nullable=False),
+            sa.Column('api_rph', sa.Integer(), nullable=False),
+            sa.Column('is_demo', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='app_pkey')
+        )
+    else:
+        op.create_table('apps',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('mode', sa.String(length=255), nullable=False),
+            sa.Column('icon', sa.String(length=255), nullable=True),
+            sa.Column('icon_background', sa.String(length=255), nullable=True),
+            sa.Column('app_model_config_id', models.types.StringUUID(), nullable=True),
+            sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False),
+            sa.Column('enable_site', sa.Boolean(), nullable=False),
+            sa.Column('enable_api', sa.Boolean(), nullable=False),
+            sa.Column('api_rpm', sa.Integer(), nullable=False),
+            sa.Column('api_rph', sa.Integer(), nullable=False),
+            sa.Column('is_demo', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='app_pkey')
+        )
     with op.batch_alter_table('apps', schema=None) as batch_op:
         batch_op.create_index('app_tenant_id_idx', ['tenant_id'], unique=False)
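[Editorial note] On the MySQL branch every `id` column loses the server-side `uuid_generate_v4()` default and becomes `models.types.StringUUID()` with no server default, so UUIDs must be generated client-side by the models. `StringUUID` is defined in `api/models/types.py`, which this patch touches but this hunk does not show; the sketch below is an assumption about its shape, not the patch's actual code: a `TypeDecorator` that uses the native UUID type on PostgreSQL and a `CHAR(36)` string on MySQL.

    import uuid
    from sqlalchemy import CHAR
    from sqlalchemy.dialects.postgresql import UUID
    from sqlalchemy.types import TypeDecorator

    class StringUUID(TypeDecorator):
        """Native UUID on PostgreSQL, CHAR(36) text on MySQL."""
        impl = CHAR
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "postgresql":
                return dialect.type_descriptor(UUID())
            return dialect.type_descriptor(CHAR(36))

        def process_bind_param(self, value, dialect):
            # Accept uuid.UUID or str; store the canonical string form.
            return None if value is None else str(value)

        def process_result_value(self, value, dialect):
            return None if value is None else str(value)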
-    op.execute('CREATE SEQUENCE task_id_sequence;')
-    op.execute('CREATE SEQUENCE taskset_id_sequence;')
+    if _is_pg(conn):
+        op.execute('CREATE SEQUENCE task_id_sequence;')
+        op.execute('CREATE SEQUENCE taskset_id_sequence;')
+    else:
+        pass
-    op.create_table('celery_taskmeta',
-    sa.Column('id', sa.Integer(), nullable=False,
-              server_default=sa.text('nextval(\'task_id_sequence\')')),
-    sa.Column('task_id', sa.String(length=155), nullable=True),
-    sa.Column('status', sa.String(length=50), nullable=True),
-    sa.Column('result', sa.PickleType(), nullable=True),
-    sa.Column('date_done', sa.DateTime(), nullable=True),
-    sa.Column('traceback', sa.Text(), nullable=True),
-    sa.Column('name', sa.String(length=155), nullable=True),
-    sa.Column('args', sa.LargeBinary(), nullable=True),
-    sa.Column('kwargs', sa.LargeBinary(), nullable=True),
-    sa.Column('worker', sa.String(length=155), nullable=True),
-    sa.Column('retries', sa.Integer(), nullable=True),
-    sa.Column('queue', sa.String(length=155), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('task_id')
-    )
-    op.create_table('celery_tasksetmeta',
-    sa.Column('id', sa.Integer(), nullable=False,
-              server_default=sa.text('nextval(\'taskset_id_sequence\')')),
-    sa.Column('taskset_id', sa.String(length=155), nullable=True),
-    sa.Column('result', sa.PickleType(), nullable=True),
-    sa.Column('date_done', sa.DateTime(), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('taskset_id')
-    )
-    op.create_table('conversations',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('app_id', postgresql.UUID(), nullable=False),
-    sa.Column('app_model_config_id', postgresql.UUID(), nullable=False),
-    sa.Column('model_provider', sa.String(length=255), nullable=False),
-    sa.Column('override_model_configs', sa.Text(), nullable=True),
-    sa.Column('model_id', sa.String(length=255), nullable=False),
-    sa.Column('mode', sa.String(length=255), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=False),
-    sa.Column('summary', sa.Text(), nullable=True),
-    sa.Column('inputs', sa.JSON(), nullable=True),
-    sa.Column('introduction', sa.Text(), nullable=True),
-    sa.Column('system_instruction', sa.Text(), nullable=True),
-    sa.Column('system_instruction_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
-    sa.Column('status', sa.String(length=255), nullable=False),
-    sa.Column('from_source', sa.String(length=255), nullable=False),
-    sa.Column('from_end_user_id', postgresql.UUID(), nullable=True),
-    sa.Column('from_account_id', postgresql.UUID(), nullable=True),
-    sa.Column('read_at', sa.DateTime(), nullable=True),
-    sa.Column('read_account_id', postgresql.UUID(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='conversation_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('celery_taskmeta',
+            sa.Column('id', sa.Integer(), nullable=False,
+                      server_default=sa.text('nextval(\'task_id_sequence\')')),
+            sa.Column('task_id', sa.String(length=155), nullable=True),
+            sa.Column('status', sa.String(length=50), nullable=True),
+            sa.Column('result', sa.PickleType(), nullable=True),
+            sa.Column('date_done', sa.DateTime(), nullable=True),
+            sa.Column('traceback', sa.Text(), nullable=True),
+            sa.Column('name', sa.String(length=155), nullable=True),
+            sa.Column('args', sa.LargeBinary(), nullable=True),
+            sa.Column('kwargs', sa.LargeBinary(), nullable=True),
+            sa.Column('worker', sa.String(length=155), nullable=True),
+            sa.Column('retries', sa.Integer(), nullable=True),
+            sa.Column('queue', sa.String(length=155), nullable=True),
+            sa.PrimaryKeyConstraint('id'),
+            sa.UniqueConstraint('task_id')
+        )
+    else:
+        op.create_table('celery_taskmeta',
+            sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
+            sa.Column('task_id', sa.String(length=155), nullable=True),
+            sa.Column('status', sa.String(length=50), nullable=True),
+            sa.Column('result', models.types.BinaryData(), nullable=True),
+            sa.Column('date_done', sa.DateTime(), nullable=True),
+            sa.Column('traceback', models.types.LongText(), nullable=True),
+            sa.Column('name', sa.String(length=155), nullable=True),
+            sa.Column('args', models.types.BinaryData(), nullable=True),
+            sa.Column('kwargs', models.types.BinaryData(), nullable=True),
+            sa.Column('worker', sa.String(length=155), nullable=True),
+            sa.Column('retries', sa.Integer(), nullable=True),
+            sa.Column('queue', sa.String(length=155), nullable=True),
+            sa.PrimaryKeyConstraint('id'),
+            sa.UniqueConstraint('task_id')
+        )
+    if _is_pg(conn):
+        op.create_table('celery_tasksetmeta',
+            sa.Column('id', sa.Integer(), nullable=False,
+                      server_default=sa.text('nextval(\'taskset_id_sequence\')')),
+            sa.Column('taskset_id', sa.String(length=155), nullable=True),
+            sa.Column('result', sa.PickleType(), nullable=True),
+            sa.Column('date_done', sa.DateTime(), nullable=True),
+            sa.PrimaryKeyConstraint('id'),
+            sa.UniqueConstraint('taskset_id')
+        )
+    else:
+        op.create_table('celery_tasksetmeta',
+            sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
+            sa.Column('taskset_id', sa.String(length=155), nullable=True),
+            sa.Column('result', models.types.BinaryData(), nullable=True),
+            sa.Column('date_done', sa.DateTime(), nullable=True),
+            sa.PrimaryKeyConstraint('id'),
+            sa.UniqueConstraint('taskset_id')
+        )
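[Editorial note] The Celery tables show the integer-PK strategy switch: PostgreSQL keeps explicit `CREATE SEQUENCE` statements plus `nextval(...)` server defaults, while MySQL, which has no sequences, gets plain `sa.Integer` primary keys with `autoincrement=True` (rendered as `AUTO_INCREMENT`). A minimal illustration of the two patterns, using a hypothetical `counters` table and helper name not found in the patch:

    import sqlalchemy as sa
    from alembic import op

    def create_counter_table(is_pg: bool):
        # Hypothetical helper contrasting the two strategies used above.
        if is_pg:
            op.execute('CREATE SEQUENCE counter_id_sequence;')
            id_col = sa.Column('id', sa.Integer(), nullable=False,
                               server_default=sa.text("nextval('counter_id_sequence')"))
        else:
            # MySQL: AUTO_INCREMENT fills the role of the sequence.
            id_col = sa.Column('id', sa.Integer(), nullable=False, autoincrement=True)
        op.create_table('counters', id_col, sa.PrimaryKeyConstraint('id'))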
+    if _is_pg(conn):
+        op.create_table('conversations',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('app_id', postgresql.UUID(), nullable=False),
+            sa.Column('app_model_config_id', postgresql.UUID(), nullable=False),
+            sa.Column('model_provider', sa.String(length=255), nullable=False),
+            sa.Column('override_model_configs', sa.Text(), nullable=True),
+            sa.Column('model_id', sa.String(length=255), nullable=False),
+            sa.Column('mode', sa.String(length=255), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('summary', sa.Text(), nullable=True),
+            sa.Column('inputs', sa.JSON(), nullable=True),
+            sa.Column('introduction', sa.Text(), nullable=True),
+            sa.Column('system_instruction', sa.Text(), nullable=True),
+            sa.Column('system_instruction_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
+            sa.Column('status', sa.String(length=255), nullable=False),
+            sa.Column('from_source', sa.String(length=255), nullable=False),
+            sa.Column('from_end_user_id', postgresql.UUID(), nullable=True),
+            sa.Column('from_account_id', postgresql.UUID(), nullable=True),
+            sa.Column('read_at', sa.DateTime(), nullable=True),
+            sa.Column('read_account_id', postgresql.UUID(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='conversation_pkey')
+        )
+    else:
+        op.create_table('conversations',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_model_config_id', models.types.StringUUID(), nullable=False),
+            sa.Column('model_provider', sa.String(length=255), nullable=False),
+            sa.Column('override_model_configs', models.types.LongText(), nullable=True),
+            sa.Column('model_id', sa.String(length=255), nullable=False),
+            sa.Column('mode', sa.String(length=255), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('summary', models.types.LongText(), nullable=True),
+            sa.Column('inputs', sa.JSON(), nullable=True),
+            sa.Column('introduction', models.types.LongText(), nullable=True),
+            sa.Column('system_instruction', models.types.LongText(), nullable=True),
+            sa.Column('system_instruction_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
+            sa.Column('status', sa.String(length=255), nullable=False),
+            sa.Column('from_source', sa.String(length=255), nullable=False),
+            sa.Column('from_end_user_id', models.types.StringUUID(), nullable=True),
+            sa.Column('from_account_id', models.types.StringUUID(), nullable=True),
+            sa.Column('read_at', sa.DateTime(), nullable=True),
+            sa.Column('read_account_id', models.types.StringUUID(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='conversation_pkey')
+        )
     with op.batch_alter_table('conversations', schema=None) as batch_op:
         batch_op.create_index('conversation_app_from_user_idx', ['app_id', 'from_source', 'from_end_user_id'], unique=False)
-    op.create_table('dataset_keyword_tables',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('keyword_table', sa.Text(), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_keyword_table_pkey'),
-    sa.UniqueConstraint('dataset_id')
-    )
+    if _is_pg(conn):
+        op.create_table('dataset_keyword_tables',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('keyword_table', sa.Text(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_keyword_table_pkey'),
+            sa.UniqueConstraint('dataset_id')
+        )
+    else:
+        op.create_table('dataset_keyword_tables',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('keyword_table', models.types.LongText(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_keyword_table_pkey'),
+            sa.UniqueConstraint('dataset_id')
+        )
     with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
         batch_op.create_index('dataset_keyword_table_dataset_id_idx', ['dataset_id'], unique=False)
-    op.create_table('dataset_process_rules',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('mode', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
-    sa.Column('rules', sa.Text(), nullable=True),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_process_rule_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('dataset_process_rules',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('mode', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
+            sa.Column('rules', sa.Text(), nullable=True),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_process_rule_pkey')
+        )
+    else:
+        op.create_table('dataset_process_rules',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('mode', sa.String(length=255), server_default=sa.text("'automatic'"), nullable=False),
+            sa.Column('rules', models.types.LongText(), nullable=True),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_process_rule_pkey')
+        )
     with op.batch_alter_table('dataset_process_rules', schema=None) as batch_op:
         batch_op.create_index('dataset_process_rule_dataset_id_idx', ['dataset_id'], unique=False)
-    op.create_table('dataset_queries',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('content', sa.Text(), nullable=False),
-    sa.Column('source', sa.String(length=255), nullable=False),
-    sa.Column('source_app_id', postgresql.UUID(), nullable=True),
-    sa.Column('created_by_role', sa.String(), nullable=False),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_query_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('dataset_queries',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('content', sa.Text(), nullable=False),
+            sa.Column('source', sa.String(length=255), nullable=False),
+            sa.Column('source_app_id', postgresql.UUID(), nullable=True),
+            sa.Column('created_by_role', sa.String(), nullable=False),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_query_pkey')
+        )
+    else:
+        op.create_table('dataset_queries',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('content', models.types.LongText(), nullable=False),
+            sa.Column('source', sa.String(length=255), nullable=False),
+            sa.Column('source_app_id', models.types.StringUUID(), nullable=True),
+            sa.Column('created_by_role', sa.String(length=255), nullable=False),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_query_pkey')
+        )
     with op.batch_alter_table('dataset_queries', schema=None) as batch_op:
         batch_op.create_index('dataset_query_dataset_id_idx', ['dataset_id'], unique=False)
-    op.create_table('datasets',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('provider', sa.String(length=255), server_default=sa.text("'vendor'::character varying"), nullable=False),
-    sa.Column('permission', sa.String(length=255), server_default=sa.text("'only_me'::character varying"), nullable=False),
-    sa.Column('data_source_type', sa.String(length=255), nullable=True),
-    sa.Column('indexing_technique', sa.String(length=255), nullable=True),
-    sa.Column('index_struct', sa.Text(), nullable=True),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_by', postgresql.UUID(), nullable=True),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('datasets',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('description', sa.Text(), nullable=True),
+            sa.Column('provider', sa.String(length=255), server_default=sa.text("'vendor'::character varying"), nullable=False),
+            sa.Column('permission', sa.String(length=255), server_default=sa.text("'only_me'::character varying"), nullable=False),
+            sa.Column('data_source_type', sa.String(length=255), nullable=True),
+            sa.Column('indexing_technique', sa.String(length=255), nullable=True),
+            sa.Column('index_struct', sa.Text(), nullable=True),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_by', postgresql.UUID(), nullable=True),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_pkey')
+        )
+    else:
+        op.create_table('datasets',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('description', models.types.LongText(), nullable=True),
+            sa.Column('provider', sa.String(length=255), server_default=sa.text("'vendor'"), nullable=False),
+            sa.Column('permission', sa.String(length=255), server_default=sa.text("'only_me'"), nullable=False),
+            sa.Column('data_source_type', sa.String(length=255), nullable=True),
+            sa.Column('indexing_technique', sa.String(length=255), nullable=True),
+            sa.Column('index_struct', models.types.LongText(), nullable=True),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_by', models.types.StringUUID(), nullable=True),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_pkey')
+        )
     with op.batch_alter_table('datasets', schema=None) as batch_op:
         batch_op.create_index('dataset_tenant_idx', ['tenant_id'], unique=False)
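[Editorial note] A pattern repeated in every branched table: string server defaults keep their explicit cast on PostgreSQL (`'vendor'::character varying`) but drop it on MySQL, because the `::` cast is PostgreSQL-only syntax. A bare quoted literal is the portable form, which is what the MySQL branches use:

    import sqlalchemy as sa

    # PostgreSQL branch: DEFAULT 'vendor'::character varying
    pg_default = sa.text("'vendor'::character varying")
    # MySQL branch: the ::cast would be a syntax error, so the patch
    # keeps only the quoted literal.
    mysql_default = sa.text("'vendor'")
    provider_col = sa.Column('provider', sa.String(length=255),
                             server_default=mysql_default, nullable=False)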
-    op.create_table('dify_setups',
-    sa.Column('version', sa.String(length=255), nullable=False),
-    sa.Column('setup_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('version', name='dify_setup_pkey')
-    )
-    op.create_table('document_segments',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('document_id', postgresql.UUID(), nullable=False),
-    sa.Column('position', sa.Integer(), nullable=False),
-    sa.Column('content', sa.Text(), nullable=False),
-    sa.Column('word_count', sa.Integer(), nullable=False),
-    sa.Column('tokens', sa.Integer(), nullable=False),
-    sa.Column('keywords', sa.JSON(), nullable=True),
-    sa.Column('index_node_id', sa.String(length=255), nullable=True),
-    sa.Column('index_node_hash', sa.String(length=255), nullable=True),
-    sa.Column('hit_count', sa.Integer(), nullable=False),
-    sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
-    sa.Column('disabled_at', sa.DateTime(), nullable=True),
-    sa.Column('disabled_by', postgresql.UUID(), nullable=True),
-    sa.Column('status', sa.String(length=255), server_default=sa.text("'waiting'::character varying"), nullable=False),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('indexing_at', sa.DateTime(), nullable=True),
-    sa.Column('completed_at', sa.DateTime(), nullable=True),
-    sa.Column('error', sa.Text(), nullable=True),
-    sa.Column('stopped_at', sa.DateTime(), nullable=True),
-    sa.PrimaryKeyConstraint('id', name='document_segment_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('dify_setups',
+            sa.Column('version', sa.String(length=255), nullable=False),
+            sa.Column('setup_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('version', name='dify_setup_pkey')
+        )
+    else:
+        op.create_table('dify_setups',
+            sa.Column('version', sa.String(length=255), nullable=False),
+            sa.Column('setup_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('version', name='dify_setup_pkey')
+        )
+    if _is_pg(conn):
+        op.create_table('document_segments',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('document_id', postgresql.UUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('content', sa.Text(), nullable=False),
+            sa.Column('word_count', sa.Integer(), nullable=False),
+            sa.Column('tokens', sa.Integer(), nullable=False),
+            sa.Column('keywords', sa.JSON(), nullable=True),
+            sa.Column('index_node_id', sa.String(length=255), nullable=True),
+            sa.Column('index_node_hash', sa.String(length=255), nullable=True),
+            sa.Column('hit_count', sa.Integer(), nullable=False),
+            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
+            sa.Column('disabled_at', sa.DateTime(), nullable=True),
+            sa.Column('disabled_by', postgresql.UUID(), nullable=True),
+            sa.Column('status', sa.String(length=255), server_default=sa.text("'waiting'::character varying"), nullable=False),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('indexing_at', sa.DateTime(), nullable=True),
+            sa.Column('completed_at', sa.DateTime(), nullable=True),
+            sa.Column('error', sa.Text(), nullable=True),
+            sa.Column('stopped_at', sa.DateTime(), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='document_segment_pkey')
+        )
+    else:
+        op.create_table('document_segments',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('document_id', models.types.StringUUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('content', models.types.LongText(), nullable=False),
+            sa.Column('word_count', sa.Integer(), nullable=False),
+            sa.Column('tokens', sa.Integer(), nullable=False),
+            sa.Column('keywords', sa.JSON(), nullable=True),
+            sa.Column('index_node_id', sa.String(length=255), nullable=True),
+            sa.Column('index_node_hash', sa.String(length=255), nullable=True),
+            sa.Column('hit_count', sa.Integer(), nullable=False),
+            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
+            sa.Column('disabled_at', sa.DateTime(), nullable=True),
+            sa.Column('disabled_by', models.types.StringUUID(), nullable=True),
+            sa.Column('status', sa.String(length=255), server_default=sa.text("'waiting'"), nullable=False),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('indexing_at', sa.DateTime(), nullable=True),
+            sa.Column('completed_at', sa.DateTime(), nullable=True),
+            sa.Column('error', models.types.LongText(), nullable=True),
+            sa.Column('stopped_at', sa.DateTime(), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='document_segment_pkey')
+        )
     with op.batch_alter_table('document_segments', schema=None) as batch_op:
         batch_op.create_index('document_segment_dataset_id_idx', ['dataset_id'], unique=False)
         batch_op.create_index('document_segment_dataset_node_idx', ['dataset_id', 'index_node_id'], unique=False)
@@ -282,359 +544,692 @@ def upgrade():
         batch_op.create_index('document_segment_tenant_dataset_idx', ['dataset_id', 'tenant_id'], unique=False)
         batch_op.create_index('document_segment_tenant_document_idx', ['document_id', 'tenant_id'], unique=False)
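[Editorial note] The other recurring type swap: PostgreSQL `sa.Text` columns become `models.types.LongText()` on MySQL (whose plain `TEXT` caps at 64 KB, versus `LONGTEXT`'s 4 GB), and `sa.PickleType`/`sa.LargeBinary` become `models.types.BinaryData()`. Both types live in `api/models/types.py`, which this patch changes but this hunk does not show, so the sketch below is an assumption about their shape rather than the patch's actual code:

    from sqlalchemy import Text
    from sqlalchemy.dialects.mysql import LONGBLOB, LONGTEXT
    from sqlalchemy.types import LargeBinary, TypeDecorator

    class LongText(TypeDecorator):
        """Plain TEXT on PostgreSQL, LONGTEXT on MySQL."""
        impl = Text
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "mysql":
                return dialect.type_descriptor(LONGTEXT())
            return dialect.type_descriptor(Text())

    class BinaryData(TypeDecorator):
        """LargeBinary (BYTEA) on PostgreSQL, LONGBLOB on MySQL."""
        impl = LargeBinary
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "mysql":
                return dialect.type_descriptor(LONGBLOB())
            return dialect.type_descriptor(LargeBinary())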
-    op.create_table('documents',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('position', sa.Integer(), nullable=False),
-    sa.Column('data_source_type', sa.String(length=255), nullable=False),
-    sa.Column('data_source_info', sa.Text(), nullable=True),
-    sa.Column('dataset_process_rule_id', postgresql.UUID(), nullable=True),
-    sa.Column('batch', sa.String(length=255), nullable=False),
-    sa.Column('name', sa.String(length=255), nullable=False),
-    sa.Column('created_from', sa.String(length=255), nullable=False),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_api_request_id', postgresql.UUID(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('processing_started_at', sa.DateTime(), nullable=True),
-    sa.Column('file_id', sa.Text(), nullable=True),
-    sa.Column('word_count', sa.Integer(), nullable=True),
-    sa.Column('parsing_completed_at', sa.DateTime(), nullable=True),
-    sa.Column('cleaning_completed_at', sa.DateTime(), nullable=True),
-    sa.Column('splitting_completed_at', sa.DateTime(), nullable=True),
-    sa.Column('tokens', sa.Integer(), nullable=True),
-    sa.Column('indexing_latency', sa.Float(), nullable=True),
-    sa.Column('completed_at', sa.DateTime(), nullable=True),
-    sa.Column('is_paused', sa.Boolean(), server_default=sa.text('false'), nullable=True),
-    sa.Column('paused_by', postgresql.UUID(), nullable=True),
-    sa.Column('paused_at', sa.DateTime(), nullable=True),
-    sa.Column('error', sa.Text(), nullable=True),
-    sa.Column('stopped_at', sa.DateTime(), nullable=True),
-    sa.Column('indexing_status', sa.String(length=255), server_default=sa.text("'waiting'::character varying"), nullable=False),
-    sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
-    sa.Column('disabled_at', sa.DateTime(), nullable=True),
-    sa.Column('disabled_by', postgresql.UUID(), nullable=True),
-    sa.Column('archived', sa.Boolean(), server_default=sa.text('false'), nullable=False),
-    sa.Column('archived_reason', sa.String(length=255), nullable=True),
-    sa.Column('archived_by', postgresql.UUID(), nullable=True),
-    sa.Column('archived_at', sa.DateTime(), nullable=True),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('doc_type', sa.String(length=40), nullable=True),
-    sa.Column('doc_metadata', sa.JSON(), nullable=True),
-    sa.PrimaryKeyConstraint('id', name='document_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('documents',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('data_source_type', sa.String(length=255), nullable=False),
+            sa.Column('data_source_info', sa.Text(), nullable=True),
+            sa.Column('dataset_process_rule_id', postgresql.UUID(), nullable=True),
+            sa.Column('batch', sa.String(length=255), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('created_from', sa.String(length=255), nullable=False),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_api_request_id', postgresql.UUID(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('processing_started_at', sa.DateTime(), nullable=True),
+            sa.Column('file_id', sa.Text(), nullable=True),
+            sa.Column('word_count', sa.Integer(), nullable=True),
+            sa.Column('parsing_completed_at', sa.DateTime(), nullable=True),
+            sa.Column('cleaning_completed_at', sa.DateTime(), nullable=True),
+            sa.Column('splitting_completed_at', sa.DateTime(), nullable=True),
+            sa.Column('tokens', sa.Integer(), nullable=True),
+            sa.Column('indexing_latency', sa.Float(), nullable=True),
+            sa.Column('completed_at', sa.DateTime(), nullable=True),
+            sa.Column('is_paused', sa.Boolean(), server_default=sa.text('false'), nullable=True),
+            sa.Column('paused_by', postgresql.UUID(), nullable=True),
+            sa.Column('paused_at', sa.DateTime(), nullable=True),
+            sa.Column('error', sa.Text(), nullable=True),
+            sa.Column('stopped_at', sa.DateTime(), nullable=True),
+            sa.Column('indexing_status', sa.String(length=255), server_default=sa.text("'waiting'::character varying"), nullable=False),
+            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
+            sa.Column('disabled_at', sa.DateTime(), nullable=True),
+            sa.Column('disabled_by', postgresql.UUID(), nullable=True),
+            sa.Column('archived', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('archived_reason', sa.String(length=255), nullable=True),
+            sa.Column('archived_by', postgresql.UUID(), nullable=True),
+            sa.Column('archived_at', sa.DateTime(), nullable=True),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('doc_type', sa.String(length=40), nullable=True),
+            sa.Column('doc_metadata', sa.JSON(), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='document_pkey')
+        )
+    else:
+        op.create_table('documents',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('data_source_type', sa.String(length=255), nullable=False),
+            sa.Column('data_source_info', models.types.LongText(), nullable=True),
+            sa.Column('dataset_process_rule_id', models.types.StringUUID(), nullable=True),
+            sa.Column('batch', sa.String(length=255), nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('created_from', sa.String(length=255), nullable=False),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_api_request_id', models.types.StringUUID(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('processing_started_at', sa.DateTime(), nullable=True),
+            sa.Column('file_id', models.types.LongText(), nullable=True),
+            sa.Column('word_count', sa.Integer(), nullable=True),
+            sa.Column('parsing_completed_at', sa.DateTime(), nullable=True),
+            sa.Column('cleaning_completed_at', sa.DateTime(), nullable=True),
+            sa.Column('splitting_completed_at', sa.DateTime(), nullable=True),
+            sa.Column('tokens', sa.Integer(), nullable=True),
+            sa.Column('indexing_latency', sa.Float(), nullable=True),
+            sa.Column('completed_at', sa.DateTime(), nullable=True),
+            sa.Column('is_paused', sa.Boolean(), server_default=sa.text('false'), nullable=True),
+            sa.Column('paused_by', models.types.StringUUID(), nullable=True),
+            sa.Column('paused_at', sa.DateTime(), nullable=True),
+            sa.Column('error', models.types.LongText(), nullable=True),
+            sa.Column('stopped_at', sa.DateTime(), nullable=True),
+            sa.Column('indexing_status', sa.String(length=255), server_default=sa.text("'waiting'"), nullable=False),
+            sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
+            sa.Column('disabled_at', sa.DateTime(), nullable=True),
+            sa.Column('disabled_by', models.types.StringUUID(), nullable=True),
+            sa.Column('archived', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('archived_reason', sa.String(length=255), nullable=True),
+            sa.Column('archived_by', models.types.StringUUID(), nullable=True),
+            sa.Column('archived_at', sa.DateTime(), nullable=True),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('doc_type', sa.String(length=40), nullable=True),
+            sa.Column('doc_metadata', sa.JSON(), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='document_pkey')
+        )
     with op.batch_alter_table('documents', schema=None) as batch_op:
         batch_op.create_index('document_dataset_id_idx', ['dataset_id'], unique=False)
         batch_op.create_index('document_is_paused_idx', ['is_paused'], unique=False)
-    op.create_table('embeddings',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('hash', sa.String(length=64), nullable=False),
-    sa.Column('embedding', sa.LargeBinary(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='embedding_pkey'),
-    sa.UniqueConstraint('hash', name='embedding_hash_idx')
-    )
-    op.create_table('end_users',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('app_id', postgresql.UUID(), nullable=True),
-    sa.Column('type', sa.String(length=255), nullable=False),
-    sa.Column('external_user_id', sa.String(length=255), nullable=True),
-    sa.Column('name', sa.String(length=255), nullable=True),
-    sa.Column('is_anonymous', sa.Boolean(), server_default=sa.text('true'), nullable=False),
-    sa.Column('session_id', sa.String(length=255), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='end_user_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('embeddings',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('hash', sa.String(length=64), nullable=False),
+            sa.Column('embedding', sa.LargeBinary(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='embedding_pkey'),
+            sa.UniqueConstraint('hash', name='embedding_hash_idx')
+        )
+    else:
+        op.create_table('embeddings',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('hash', sa.String(length=64), nullable=False),
+            sa.Column('embedding', models.types.BinaryData(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='embedding_pkey'),
+            sa.UniqueConstraint('hash', name='embedding_hash_idx')
+        )
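[Editorial note] Timestamp defaults follow the same split throughout: the PostgreSQL branch keeps the literal `sa.text('CURRENT_TIMESTAMP(0)')` (sub-second precision pinned to 0), while the MySQL branch uses `sa.func.current_timestamp()`, which SQLAlchemy renders as plain `CURRENT_TIMESTAMP` and both databases accept. A compact contrast of the two column definitions as they appear above:

    import sqlalchemy as sa

    # PostgreSQL branch: literal SQL with precision pinned to whole seconds.
    created_at_pg = sa.Column('created_at', sa.DateTime(),
                              server_default=sa.text('CURRENT_TIMESTAMP(0)'),
                              nullable=False)
    # MySQL branch: the function form renders as CURRENT_TIMESTAMP, the
    # portable spelling, so it is used everywhere on the non-PG path.
    created_at_my = sa.Column('created_at', sa.DateTime(),
                              server_default=sa.func.current_timestamp(),
                              nullable=False)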
+    if _is_pg(conn):
+        op.create_table('end_users',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('app_id', postgresql.UUID(), nullable=True),
+            sa.Column('type', sa.String(length=255), nullable=False),
+            sa.Column('external_user_id', sa.String(length=255), nullable=True),
+            sa.Column('name', sa.String(length=255), nullable=True),
+            sa.Column('is_anonymous', sa.Boolean(), server_default=sa.text('true'), nullable=False),
+            sa.Column('session_id', sa.String(length=255), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='end_user_pkey')
+        )
+    else:
+        op.create_table('end_users',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=True),
+            sa.Column('type', sa.String(length=255), nullable=False),
+            sa.Column('external_user_id', sa.String(length=255), nullable=True),
+            sa.Column('name', sa.String(length=255), nullable=True),
+            sa.Column('is_anonymous', sa.Boolean(), server_default=sa.text('true'), nullable=False),
+            sa.Column('session_id', sa.String(length=255), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='end_user_pkey')
+        )
     with op.batch_alter_table('end_users', schema=None) as batch_op:
         batch_op.create_index('end_user_session_id_idx', ['session_id', 'type'], unique=False)
         batch_op.create_index('end_user_tenant_session_id_idx', ['tenant_id', 'session_id', 'type'], unique=False)
-    op.create_table('installed_apps',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('app_id', postgresql.UUID(), nullable=False),
-    sa.Column('app_owner_tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('position', sa.Integer(), nullable=False),
-    sa.Column('is_pinned', sa.Boolean(), server_default=sa.text('false'), nullable=False),
-    sa.Column('last_used_at', sa.DateTime(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='installed_app_pkey'),
-    sa.UniqueConstraint('tenant_id', 'app_id', name='unique_tenant_app')
-    )
+    if _is_pg(conn):
+        op.create_table('installed_apps',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('app_id', postgresql.UUID(), nullable=False),
+            sa.Column('app_owner_tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('is_pinned', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('last_used_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='installed_app_pkey'),
+            sa.UniqueConstraint('tenant_id', 'app_id', name='unique_tenant_app')
+        )
+    else:
+        op.create_table('installed_apps',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=False),
+            sa.Column('app_owner_tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('is_pinned', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('last_used_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='installed_app_pkey'),
+            sa.UniqueConstraint('tenant_id', 'app_id', name='unique_tenant_app')
+        )
     with op.batch_alter_table('installed_apps', schema=None) as batch_op:
         batch_op.create_index('installed_app_app_id_idx', ['app_id'], unique=False)
         batch_op.create_index('installed_app_tenant_id_idx', ['tenant_id'], unique=False)
-    op.create_table('invitation_codes',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('batch', sa.String(length=255), nullable=False),
-    sa.Column('code', sa.String(length=32), nullable=False),
-    sa.Column('status', sa.String(length=16), server_default=sa.text("'unused'::character varying"), nullable=False),
-    sa.Column('used_at', sa.DateTime(), nullable=True),
-    sa.Column('used_by_tenant_id', postgresql.UUID(), nullable=True),
-    sa.Column('used_by_account_id', postgresql.UUID(), nullable=True),
-    sa.Column('deprecated_at', sa.DateTime(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='invitation_code_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('invitation_codes',
+            sa.Column('id', sa.Integer(), nullable=False),
+            sa.Column('batch', sa.String(length=255), nullable=False),
+            sa.Column('code', sa.String(length=32), nullable=False),
+            sa.Column('status', sa.String(length=16), server_default=sa.text("'unused'::character varying"), nullable=False),
+            sa.Column('used_at', sa.DateTime(), nullable=True),
+            sa.Column('used_by_tenant_id', postgresql.UUID(), nullable=True),
+            sa.Column('used_by_account_id', postgresql.UUID(), nullable=True),
+            sa.Column('deprecated_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='invitation_code_pkey')
+        )
+    else:
+        op.create_table('invitation_codes',
+            sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
+            sa.Column('batch', sa.String(length=255), nullable=False),
+            sa.Column('code', sa.String(length=32), nullable=False),
+            sa.Column('status', sa.String(length=16), server_default=sa.text("'unused'"), nullable=False),
+            sa.Column('used_at', sa.DateTime(), nullable=True),
+            sa.Column('used_by_tenant_id', models.types.StringUUID(), nullable=True),
+            sa.Column('used_by_account_id', models.types.StringUUID(), nullable=True),
+            sa.Column('deprecated_at', sa.DateTime(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='invitation_code_pkey')
+        )
     with op.batch_alter_table('invitation_codes', schema=None) as batch_op:
         batch_op.create_index('invitation_codes_batch_idx', ['batch'], unique=False)
         batch_op.create_index('invitation_codes_code_idx', ['code', 'status'], unique=False)
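[Editorial note] The message_agent_thoughts hunk below shows one more portability fix worth calling out: PostgreSQL accepts `sa.String()` with no length (an unbounded VARCHAR), but MySQL's VARCHAR requires an explicit length, so the MySQL branch pins `currency` and `created_by_role` to `sa.String(length=255)`:

    import sqlalchemy as sa

    # Valid on PostgreSQL: VARCHAR with no declared length.
    currency_pg = sa.Column('currency', sa.String(), nullable=True)
    # MySQL requires a length for VARCHAR, hence the pinned 255 below.
    currency_my = sa.Column('currency', sa.String(length=255), nullable=True)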
-    op.create_table('message_agent_thoughts',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('message_id', postgresql.UUID(), nullable=False),
-    sa.Column('message_chain_id', postgresql.UUID(), nullable=False),
-    sa.Column('position', sa.Integer(), nullable=False),
-    sa.Column('thought', sa.Text(), nullable=True),
-    sa.Column('tool', sa.Text(), nullable=True),
-    sa.Column('tool_input', sa.Text(), nullable=True),
-    sa.Column('observation', sa.Text(), nullable=True),
-    sa.Column('tool_process_data', sa.Text(), nullable=True),
-    sa.Column('message', sa.Text(), nullable=True),
-    sa.Column('message_token', sa.Integer(), nullable=True),
-    sa.Column('message_unit_price', sa.Numeric(), nullable=True),
-    sa.Column('answer', sa.Text(), nullable=True),
-    sa.Column('answer_token', sa.Integer(), nullable=True),
-    sa.Column('answer_unit_price', sa.Numeric(), nullable=True),
-    sa.Column('tokens', sa.Integer(), nullable=True),
-    sa.Column('total_price', sa.Numeric(), nullable=True),
-    sa.Column('currency', sa.String(), nullable=True),
-    sa.Column('latency', sa.Float(), nullable=True),
-    sa.Column('created_by_role', sa.String(), nullable=False),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='message_agent_thought_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('message_agent_thoughts',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('message_id', postgresql.UUID(), nullable=False),
+            sa.Column('message_chain_id', postgresql.UUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('thought', sa.Text(), nullable=True),
+            sa.Column('tool', sa.Text(), nullable=True),
+            sa.Column('tool_input', sa.Text(), nullable=True),
+            sa.Column('observation', sa.Text(), nullable=True),
+            sa.Column('tool_process_data', sa.Text(), nullable=True),
+            sa.Column('message', sa.Text(), nullable=True),
+            sa.Column('message_token', sa.Integer(), nullable=True),
+            sa.Column('message_unit_price', sa.Numeric(), nullable=True),
+            sa.Column('answer', sa.Text(), nullable=True),
+            sa.Column('answer_token', sa.Integer(), nullable=True),
+            sa.Column('answer_unit_price', sa.Numeric(), nullable=True),
+            sa.Column('tokens', sa.Integer(), nullable=True),
+            sa.Column('total_price', sa.Numeric(), nullable=True),
+            sa.Column('currency', sa.String(), nullable=True),
+            sa.Column('latency', sa.Float(), nullable=True),
+            sa.Column('created_by_role', sa.String(), nullable=False),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='message_agent_thought_pkey')
+        )
+    else:
+        op.create_table('message_agent_thoughts',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('message_id', models.types.StringUUID(), nullable=False),
+            sa.Column('message_chain_id', models.types.StringUUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('thought', models.types.LongText(), nullable=True),
+            sa.Column('tool', models.types.LongText(), nullable=True),
+            sa.Column('tool_input', models.types.LongText(), nullable=True),
+            sa.Column('observation', models.types.LongText(), nullable=True),
+            sa.Column('tool_process_data', models.types.LongText(), nullable=True),
+            sa.Column('message', models.types.LongText(), nullable=True),
+            sa.Column('message_token', sa.Integer(), nullable=True),
+            sa.Column('message_unit_price', sa.Numeric(), nullable=True),
+            sa.Column('answer', models.types.LongText(), nullable=True),
+            sa.Column('answer_token', sa.Integer(), nullable=True),
+            sa.Column('answer_unit_price', sa.Numeric(), nullable=True),
+            sa.Column('tokens', sa.Integer(), nullable=True),
+            sa.Column('total_price', sa.Numeric(), nullable=True),
+            sa.Column('currency', sa.String(length=255), nullable=True),
+            sa.Column('latency', sa.Float(), nullable=True),
+            sa.Column('created_by_role', sa.String(length=255), nullable=False),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='message_agent_thought_pkey')
+        )
     with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
         batch_op.create_index('message_agent_thought_message_chain_id_idx', ['message_chain_id'], unique=False)
         batch_op.create_index('message_agent_thought_message_id_idx', ['message_id'], unique=False)
-    op.create_table('message_chains',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('message_id', postgresql.UUID(), nullable=False),
-    sa.Column('type', sa.String(length=255), nullable=False),
-    sa.Column('input', sa.Text(), nullable=True),
-    sa.Column('output', sa.Text(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='message_chain_pkey')
-    )
+    if _is_pg(conn):
+        op.create_table('message_chains',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('message_id', postgresql.UUID(), nullable=False),
+            sa.Column('type', sa.String(length=255), nullable=False),
+            sa.Column('input', sa.Text(), nullable=True),
+            sa.Column('output', sa.Text(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='message_chain_pkey')
+        )
+    else:
+        op.create_table('message_chains',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('message_id', models.types.StringUUID(), nullable=False),
+            sa.Column('type', sa.String(length=255), nullable=False),
+            sa.Column('input', models.types.LongText(), nullable=True),
+            sa.Column('output', models.types.LongText(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='message_chain_pkey')
+        )
     with op.batch_alter_table('message_chains', schema=None) as batch_op:
         batch_op.create_index('message_chain_message_id_idx', ['message_id'], unique=False)
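[Editorial note] By contrast, the `sa.JSON()` columns (`inputs`, `keywords`, `doc_metadata`, and `content` on operation_logs below) are never branched: SQLAlchemy's generic JSON type renders as native JSON on PostgreSQL and on MySQL 5.7+, so the same column definition serves both metadb backends.

    import sqlalchemy as sa

    # No dialect branch needed: generic sa.JSON maps to native JSON on
    # PostgreSQL and on MySQL 5.7+ alike.
    content = sa.Column('content', sa.JSON(), nullable=True)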
server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_feedback_pkey') + ) + else: + op.create_table('message_feedbacks', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('message_id', models.types.StringUUID(), nullable=False), + sa.Column('rating', sa.String(length=255), nullable=False), + sa.Column('content', models.types.LongText(), nullable=True), + sa.Column('from_source', sa.String(length=255), nullable=False), + sa.Column('from_end_user_id', models.types.StringUUID(), nullable=True), + sa.Column('from_account_id', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_feedback_pkey') + ) with op.batch_alter_table('message_feedbacks', schema=None) as batch_op: batch_op.create_index('message_feedback_app_idx', ['app_id'], unique=False) batch_op.create_index('message_feedback_conversation_idx', ['conversation_id', 'from_source', 'rating'], unique=False) batch_op.create_index('message_feedback_message_idx', ['message_id', 'from_source'], unique=False) - op.create_table('operation_logs', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('account_id', postgresql.UUID(), nullable=False), - sa.Column('action', sa.String(length=255), nullable=False), - sa.Column('content', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('created_ip', sa.String(length=255), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='operation_log_pkey') - ) + if _is_pg(conn): + op.create_table('operation_logs', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('account_id', postgresql.UUID(), nullable=False), + sa.Column('action', sa.String(length=255), nullable=False), + sa.Column('content', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('created_ip', sa.String(length=255), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='operation_log_pkey') + ) + else: + op.create_table('operation_logs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('action', sa.String(length=255), nullable=False), + sa.Column('content', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('created_ip', sa.String(length=255), nullable=False), + sa.Column('updated_at', sa.DateTime(), 
server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='operation_log_pkey') + ) with op.batch_alter_table('operation_logs', schema=None) as batch_op: batch_op.create_index('operation_log_account_action_idx', ['tenant_id', 'account_id', 'action'], unique=False) - op.create_table('pinned_conversations', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('conversation_id', postgresql.UUID(), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='pinned_conversation_pkey') - ) + if _is_pg(conn): + op.create_table('pinned_conversations', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('conversation_id', postgresql.UUID(), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='pinned_conversation_pkey') + ) + else: + op.create_table('pinned_conversations', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='pinned_conversation_pkey') + ) with op.batch_alter_table('pinned_conversations', schema=None) as batch_op: batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by'], unique=False) - op.create_table('providers', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('provider_name', sa.String(length=40), nullable=False), - sa.Column('provider_type', sa.String(length=40), nullable=False, server_default=sa.text("'custom'::character varying")), - sa.Column('encrypted_config', sa.Text(), nullable=True), - sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('last_used', sa.DateTime(), nullable=True), - sa.Column('quota_type', sa.String(length=40), nullable=True, server_default=sa.text("''::character varying")), - sa.Column('quota_limit', sa.Integer(), nullable=True), - sa.Column('quota_used', sa.Integer(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='provider_pkey'), - sa.UniqueConstraint('tenant_id', 'provider_name', 'provider_type', 'quota_type', name='unique_provider_name_type_quota') - ) + if _is_pg(conn): + op.create_table('providers', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('provider_type', 
sa.String(length=40), nullable=False, server_default=sa.text("'custom'::character varying")), + sa.Column('encrypted_config', sa.Text(), nullable=True), + sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('last_used', sa.DateTime(), nullable=True), + sa.Column('quota_type', sa.String(length=40), nullable=True, server_default=sa.text("''::character varying")), + sa.Column('quota_limit', sa.Integer(), nullable=True), + sa.Column('quota_used', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_name', 'provider_type', 'quota_type', name='unique_provider_name_type_quota') + ) + else: + op.create_table('providers', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('provider_type', sa.String(length=40), nullable=False, server_default=sa.text("'custom'")), + sa.Column('encrypted_config', models.types.LongText(), nullable=True), + sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('last_used', sa.DateTime(), nullable=True), + sa.Column('quota_type', sa.String(length=40), nullable=True, server_default=sa.text("''")), + sa.Column('quota_limit', sa.Integer(), nullable=True), + sa.Column('quota_used', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_pkey'), + sa.UniqueConstraint('tenant_id', 'provider_name', 'provider_type', 'quota_type', name='unique_provider_name_type_quota') + ) with op.batch_alter_table('providers', schema=None) as batch_op: batch_op.create_index('provider_tenant_id_provider_idx', ['tenant_id', 'provider_name'], unique=False) - op.create_table('recommended_apps', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('description', sa.JSON(), nullable=False), - sa.Column('copyright', sa.String(length=255), nullable=False), - sa.Column('privacy_policy', sa.String(length=255), nullable=False), - sa.Column('category', sa.String(length=255), nullable=False), - sa.Column('position', sa.Integer(), nullable=False), - sa.Column('is_listed', sa.Boolean(), nullable=False), - sa.Column('install_count', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='recommended_app_pkey') - ) + if _is_pg(conn): + op.create_table('recommended_apps', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('description', sa.JSON(), nullable=False), + sa.Column('copyright', sa.String(length=255), nullable=False), + sa.Column('privacy_policy', sa.String(length=255), nullable=False), + 
sa.Column('category', sa.String(length=255), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('is_listed', sa.Boolean(), nullable=False), + sa.Column('install_count', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='recommended_app_pkey') + ) + else: + op.create_table('recommended_apps', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('description', sa.JSON(), nullable=False), + sa.Column('copyright', sa.String(length=255), nullable=False), + sa.Column('privacy_policy', sa.String(length=255), nullable=False), + sa.Column('category', sa.String(length=255), nullable=False), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('is_listed', sa.Boolean(), nullable=False), + sa.Column('install_count', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='recommended_app_pkey') + ) with op.batch_alter_table('recommended_apps', schema=None) as batch_op: batch_op.create_index('recommended_app_app_id_idx', ['app_id'], unique=False) batch_op.create_index('recommended_app_is_listed_idx', ['is_listed'], unique=False) - op.create_table('saved_messages', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('message_id', postgresql.UUID(), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='saved_message_pkey') - ) + if _is_pg(conn): + op.create_table('saved_messages', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('message_id', postgresql.UUID(), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='saved_message_pkey') + ) + else: + op.create_table('saved_messages', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('message_id', models.types.StringUUID(), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='saved_message_pkey') + ) with op.batch_alter_table('saved_messages', schema=None) as batch_op: batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by'], unique=False) - op.create_table('sessions', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('session_id', sa.String(length=255), nullable=True), - sa.Column('data', sa.LargeBinary(), nullable=True), - sa.Column('expiry', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id'), - 
sa.UniqueConstraint('session_id') - ) - op.create_table('sites', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('icon', sa.String(length=255), nullable=True), - sa.Column('icon_background', sa.String(length=255), nullable=True), - sa.Column('description', sa.String(length=255), nullable=True), - sa.Column('default_language', sa.String(length=255), nullable=False), - sa.Column('copyright', sa.String(length=255), nullable=True), - sa.Column('privacy_policy', sa.String(length=255), nullable=True), - sa.Column('customize_domain', sa.String(length=255), nullable=True), - sa.Column('customize_token_strategy', sa.String(length=255), nullable=False), - sa.Column('prompt_public', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('code', sa.String(length=255), nullable=True), - sa.PrimaryKeyConstraint('id', name='site_pkey') - ) + if _is_pg(conn): + op.create_table('sessions', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('session_id', sa.String(length=255), nullable=True), + sa.Column('data', sa.LargeBinary(), nullable=True), + sa.Column('expiry', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('session_id') + ) + else: + op.create_table('sessions', + sa.Column('id', sa.Integer(), nullable=False, autoincrement=True), + sa.Column('session_id', sa.String(length=255), nullable=True), + sa.Column('data', models.types.BinaryData(), nullable=True), + sa.Column('expiry', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('session_id') + ) + if _is_pg(conn): + op.create_table('sites', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('icon', sa.String(length=255), nullable=True), + sa.Column('icon_background', sa.String(length=255), nullable=True), + sa.Column('description', sa.String(length=255), nullable=True), + sa.Column('default_language', sa.String(length=255), nullable=False), + sa.Column('copyright', sa.String(length=255), nullable=True), + sa.Column('privacy_policy', sa.String(length=255), nullable=True), + sa.Column('customize_domain', sa.String(length=255), nullable=True), + sa.Column('customize_token_strategy', sa.String(length=255), nullable=False), + sa.Column('prompt_public', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('code', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name='site_pkey') + ) + else: + op.create_table('sites', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', 
models.types.StringUUID(), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('icon', sa.String(length=255), nullable=True), + sa.Column('icon_background', sa.String(length=255), nullable=True), + sa.Column('description', sa.String(length=255), nullable=True), + sa.Column('default_language', sa.String(length=255), nullable=False), + sa.Column('copyright', sa.String(length=255), nullable=True), + sa.Column('privacy_policy', sa.String(length=255), nullable=True), + sa.Column('customize_domain', sa.String(length=255), nullable=True), + sa.Column('customize_token_strategy', sa.String(length=255), nullable=False), + sa.Column('prompt_public', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('code', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name='site_pkey') + ) with op.batch_alter_table('sites', schema=None) as batch_op: batch_op.create_index('site_app_id_idx', ['app_id'], unique=False) batch_op.create_index('site_code_idx', ['code', 'status'], unique=False) - op.create_table('tenant_account_joins', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('account_id', postgresql.UUID(), nullable=False), - sa.Column('role', sa.String(length=16), server_default='normal', nullable=False), - sa.Column('invited_by', postgresql.UUID(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tenant_account_join_pkey'), - sa.UniqueConstraint('tenant_id', 'account_id', name='unique_tenant_account_join') - ) + if _is_pg(conn): + op.create_table('tenant_account_joins', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('account_id', postgresql.UUID(), nullable=False), + sa.Column('role', sa.String(length=16), server_default='normal', nullable=False), + sa.Column('invited_by', postgresql.UUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_account_join_pkey'), + sa.UniqueConstraint('tenant_id', 'account_id', name='unique_tenant_account_join') + ) + else: + op.create_table('tenant_account_joins', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('role', sa.String(length=16), server_default='normal', nullable=False), + sa.Column('invited_by', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + 
sa.PrimaryKeyConstraint('id', name='tenant_account_join_pkey'), + sa.UniqueConstraint('tenant_id', 'account_id', name='unique_tenant_account_join') + ) with op.batch_alter_table('tenant_account_joins', schema=None) as batch_op: batch_op.create_index('tenant_account_join_account_id_idx', ['account_id'], unique=False) batch_op.create_index('tenant_account_join_tenant_id_idx', ['tenant_id'], unique=False) - op.create_table('tenants', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('encrypt_public_key', sa.Text(), nullable=True), - sa.Column('plan', sa.String(length=255), server_default=sa.text("'basic'::character varying"), nullable=False), - sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tenant_pkey') - ) - op.create_table('upload_files', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('storage_type', sa.String(length=255), nullable=False), - sa.Column('key', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('size', sa.Integer(), nullable=False), - sa.Column('extension', sa.String(length=255), nullable=False), - sa.Column('mime_type', sa.String(length=255), nullable=True), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('used', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.Column('used_by', postgresql.UUID(), nullable=True), - sa.Column('used_at', sa.DateTime(), nullable=True), - sa.Column('hash', sa.String(length=255), nullable=True), - sa.PrimaryKeyConstraint('id', name='upload_file_pkey') - ) + if _is_pg(conn): + op.create_table('tenants', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('encrypt_public_key', sa.Text(), nullable=True), + sa.Column('plan', sa.String(length=255), server_default=sa.text("'basic'::character varying"), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_pkey') + ) + else: + op.create_table('tenants', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('encrypt_public_key', models.types.LongText(), nullable=True), + sa.Column('plan', sa.String(length=255), server_default=sa.text("'basic'"), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), 
server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tenant_pkey') + ) + if _is_pg(conn): + op.create_table('upload_files', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('storage_type', sa.String(length=255), nullable=False), + sa.Column('key', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('size', sa.Integer(), nullable=False), + sa.Column('extension', sa.String(length=255), nullable=False), + sa.Column('mime_type', sa.String(length=255), nullable=True), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('used', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('used_by', postgresql.UUID(), nullable=True), + sa.Column('used_at', sa.DateTime(), nullable=True), + sa.Column('hash', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name='upload_file_pkey') + ) + else: + op.create_table('upload_files', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('storage_type', sa.String(length=255), nullable=False), + sa.Column('key', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('size', sa.Integer(), nullable=False), + sa.Column('extension', sa.String(length=255), nullable=False), + sa.Column('mime_type', sa.String(length=255), nullable=True), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('used', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('used_by', models.types.StringUUID(), nullable=True), + sa.Column('used_at', sa.DateTime(), nullable=True), + sa.Column('hash', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name='upload_file_pkey') + ) with op.batch_alter_table('upload_files', schema=None) as batch_op: batch_op.create_index('upload_file_tenant_idx', ['tenant_id'], unique=False) - op.create_table('message_annotations', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('conversation_id', postgresql.UUID(), nullable=False), - sa.Column('message_id', postgresql.UUID(), nullable=False), - sa.Column('content', sa.Text(), nullable=False), - sa.Column('account_id', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='message_annotation_pkey') - ) + if _is_pg(conn): + op.create_table('message_annotations', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('conversation_id', postgresql.UUID(), nullable=False), + sa.Column('message_id', postgresql.UUID(), nullable=False), + sa.Column('content', sa.Text(), nullable=False), + sa.Column('account_id', postgresql.UUID(), nullable=False), + 
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_annotation_pkey') + ) + else: + op.create_table('message_annotations', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('message_id', models.types.StringUUID(), nullable=False), + sa.Column('content', models.types.LongText(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_annotation_pkey') + ) with op.batch_alter_table('message_annotations', schema=None) as batch_op: batch_op.create_index('message_annotation_app_idx', ['app_id'], unique=False) batch_op.create_index('message_annotation_conversation_idx', ['conversation_id'], unique=False) batch_op.create_index('message_annotation_message_idx', ['message_id'], unique=False) - op.create_table('messages', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('model_provider', sa.String(length=255), nullable=False), - sa.Column('model_id', sa.String(length=255), nullable=False), - sa.Column('override_model_configs', sa.Text(), nullable=True), - sa.Column('conversation_id', postgresql.UUID(), nullable=False), - sa.Column('inputs', sa.JSON(), nullable=True), - sa.Column('query', sa.Text(), nullable=False), - sa.Column('message', sa.JSON(), nullable=False), - sa.Column('message_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), - sa.Column('message_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), - sa.Column('answer', sa.Text(), nullable=False), - sa.Column('answer_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), - sa.Column('answer_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), - sa.Column('provider_response_latency', sa.Float(), server_default=sa.text('0'), nullable=False), - sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True), - sa.Column('currency', sa.String(length=255), nullable=False), - sa.Column('from_source', sa.String(length=255), nullable=False), - sa.Column('from_end_user_id', postgresql.UUID(), nullable=True), - sa.Column('from_account_id', postgresql.UUID(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('agent_based', sa.Boolean(), server_default=sa.text('false'), nullable=False), - sa.PrimaryKeyConstraint('id', name='message_pkey') - ) + if _is_pg(conn): + op.create_table('messages', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('model_provider', sa.String(length=255), nullable=False), + sa.Column('model_id', sa.String(length=255), nullable=False), + sa.Column('override_model_configs', sa.Text(), nullable=True), + 
sa.Column('conversation_id', postgresql.UUID(), nullable=False), + sa.Column('inputs', sa.JSON(), nullable=True), + sa.Column('query', sa.Text(), nullable=False), + sa.Column('message', sa.JSON(), nullable=False), + sa.Column('message_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('message_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), + sa.Column('answer', sa.Text(), nullable=False), + sa.Column('answer_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('answer_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), + sa.Column('provider_response_latency', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True), + sa.Column('currency', sa.String(length=255), nullable=False), + sa.Column('from_source', sa.String(length=255), nullable=False), + sa.Column('from_end_user_id', postgresql.UUID(), nullable=True), + sa.Column('from_account_id', postgresql.UUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('agent_based', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_pkey') + ) + else: + op.create_table('messages', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('model_provider', sa.String(length=255), nullable=False), + sa.Column('model_id', sa.String(length=255), nullable=False), + sa.Column('override_model_configs', models.types.LongText(), nullable=True), + sa.Column('conversation_id', models.types.StringUUID(), nullable=False), + sa.Column('inputs', sa.JSON(), nullable=True), + sa.Column('query', models.types.LongText(), nullable=False), + sa.Column('message', sa.JSON(), nullable=False), + sa.Column('message_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('message_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), + sa.Column('answer', models.types.LongText(), nullable=False), + sa.Column('answer_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('answer_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), + sa.Column('provider_response_latency', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True), + sa.Column('currency', sa.String(length=255), nullable=False), + sa.Column('from_source', sa.String(length=255), nullable=False), + sa.Column('from_end_user_id', models.types.StringUUID(), nullable=True), + sa.Column('from_account_id', models.types.StringUUID(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('agent_based', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_pkey') + ) with op.batch_alter_table('messages', schema=None) as batch_op: batch_op.create_index('message_account_idx', ['app_id', 'from_source', 'from_account_id'], unique=False) batch_op.create_index('message_app_id_idx', ['app_id', 'created_at'], unique=False) @@ -764,8 +1359,12 @@ def downgrade(): 
     op.drop_table('celery_tasksetmeta')
     op.drop_table('celery_taskmeta')

-    op.execute('DROP SEQUENCE taskset_id_sequence;')
-    op.execute('DROP SEQUENCE task_id_sequence;')
+    conn = op.get_bind()
+    if _is_pg(conn):
+        op.execute('DROP SEQUENCE taskset_id_sequence;')
+        op.execute('DROP SEQUENCE task_id_sequence;')
+    else:
+        pass

     with op.batch_alter_table('apps', schema=None) as batch_op:
         batch_op.drop_index('app_tenant_id_idx')
@@ -793,5 +1392,9 @@ def downgrade():
     op.drop_table('accounts')
     op.drop_table('account_integrates')

-    op.execute('DROP EXTENSION IF EXISTS "uuid-ossp";')
+    conn = op.get_bind()
+    if _is_pg(conn):
+        op.execute('DROP EXTENSION IF EXISTS "uuid-ossp";')
+    else:
+        pass
     # ### end Alembic commands ###
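Every migration this patch touches follows the same shape: fetch the bind once at the top of upgrade()/downgrade(), branch on the dialect name through the _is_pg helper, keep the original PostgreSQL DDL byte-for-byte, and give MySQL a branch built from portable types. A minimal sketch of the pattern on a hypothetical table (only _is_pg, models.types.StringUUID, and models.types.LongText come from this patch; the table itself is illustrative):

    import sqlalchemy as sa
    from alembic import op
    from sqlalchemy.dialects import postgresql

    import models.types


    def _is_pg(conn):
        return conn.dialect.name == "postgresql"


    def upgrade():
        conn = op.get_bind()
        if _is_pg(conn):
            # PostgreSQL: server-generated UUIDs and TEXT, exactly as before
            op.create_table('example',
                sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
                sa.Column('body', sa.Text(), nullable=True),
                sa.PrimaryKeyConstraint('id', name='example_pkey')
            )
        else:
            # MySQL: CHAR(36)-style UUIDs filled in by the application, LONGTEXT body
            op.create_table('example',
                sa.Column('id', models.types.StringUUID(), nullable=False),
                sa.Column('body', models.types.LongText(), nullable=True),
                sa.PrimaryKeyConstraint('id', name='example_pkey')
            )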
diff --git a/api/migrations/versions/6dcb43972bdc_add_dataset_retriever_resource.py b/api/migrations/versions/6dcb43972bdc_add_dataset_retriever_resource.py
index da27dd4426..78fed540bc 100644
--- a/api/migrations/versions/6dcb43972bdc_add_dataset_retriever_resource.py
+++ b/api/migrations/versions/6dcb43972bdc_add_dataset_retriever_resource.py
@@ -9,6 +9,12 @@ import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import postgresql

+import models.types
+
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '6dcb43972bdc'
 down_revision = '4bcffcd64aa4'
@@ -18,27 +24,53 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('dataset_retriever_resources',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('message_id', postgresql.UUID(), nullable=False),
-    sa.Column('position', sa.Integer(), nullable=False),
-    sa.Column('dataset_id', postgresql.UUID(), nullable=False),
-    sa.Column('dataset_name', sa.Text(), nullable=False),
-    sa.Column('document_id', postgresql.UUID(), nullable=False),
-    sa.Column('document_name', sa.Text(), nullable=False),
-    sa.Column('data_source_type', sa.Text(), nullable=False),
-    sa.Column('segment_id', postgresql.UUID(), nullable=False),
-    sa.Column('score', sa.Float(), nullable=True),
-    sa.Column('content', sa.Text(), nullable=False),
-    sa.Column('hit_count', sa.Integer(), nullable=True),
-    sa.Column('word_count', sa.Integer(), nullable=True),
-    sa.Column('segment_position', sa.Integer(), nullable=True),
-    sa.Column('index_node_hash', sa.Text(), nullable=True),
-    sa.Column('retriever_from', sa.Text(), nullable=False),
-    sa.Column('created_by', postgresql.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('dataset_retriever_resources',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('message_id', postgresql.UUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('dataset_id', postgresql.UUID(), nullable=False),
+            sa.Column('dataset_name', sa.Text(), nullable=False),
+            sa.Column('document_id', postgresql.UUID(), nullable=False),
+            sa.Column('document_name', sa.Text(), nullable=False),
+            sa.Column('data_source_type', sa.Text(), nullable=False),
+            sa.Column('segment_id', postgresql.UUID(), nullable=False),
+            sa.Column('score', sa.Float(), nullable=True),
+            sa.Column('content', sa.Text(), nullable=False),
+            sa.Column('hit_count', sa.Integer(), nullable=True),
+            sa.Column('word_count', sa.Integer(), nullable=True),
+            sa.Column('segment_position', sa.Integer(), nullable=True),
+            sa.Column('index_node_hash', sa.Text(), nullable=True),
+            sa.Column('retriever_from', sa.Text(), nullable=False),
+            sa.Column('created_by', postgresql.UUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
+        )
+    else:
+        op.create_table('dataset_retriever_resources',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('message_id', models.types.StringUUID(), nullable=False),
+            sa.Column('position', sa.Integer(), nullable=False),
+            sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
+            sa.Column('dataset_name', models.types.LongText(), nullable=False),
+            sa.Column('document_id', models.types.StringUUID(), nullable=False),
+            sa.Column('document_name', models.types.LongText(), nullable=False),
+            sa.Column('data_source_type', models.types.LongText(), nullable=False),
+            sa.Column('segment_id', models.types.StringUUID(), nullable=False),
+            sa.Column('score', sa.Float(), nullable=True),
+            sa.Column('content', models.types.LongText(), nullable=False),
+            sa.Column('hit_count', sa.Integer(), nullable=True),
+            sa.Column('word_count', sa.Integer(), nullable=True),
+            sa.Column('segment_position', sa.Integer(), nullable=True),
+            sa.Column('index_node_hash', models.types.LongText(), nullable=True),
+            sa.Column('retriever_from', models.types.LongText(), nullable=False),
+            sa.Column('created_by', models.types.StringUUID(), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
+        )
+
     with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
         batch_op.create_index('dataset_retriever_resource_message_id_idx', ['message_id'], unique=False)
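models.types.StringUUID is what lets a single column definition serve both engines. Its body is not shown in this patch, but a plausible implementation (an assumption for illustration, not a quote from api/models/types.py) is a TypeDecorator that renders as native UUID on PostgreSQL and CHAR(36) everywhere else:

    from sqlalchemy import CHAR
    from sqlalchemy.dialects.postgresql import UUID
    from sqlalchemy.types import TypeDecorator


    class StringUUID(TypeDecorator):
        impl = CHAR
        cache_ok = True

        def load_dialect_impl(self, dialect):
            # Native UUID on PostgreSQL, plain CHAR(36) elsewhere (MySQL included)
            if dialect.name == "postgresql":
                return dialect.type_descriptor(UUID())
            return dialect.type_descriptor(CHAR(36))

        def process_result_value(self, value, dialect):
            # Normalize both backends to a plain string
            return str(value) if value is not None else None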
diff --git a/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py b/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py
index 4fa322f693..1ace8ea5a0 100644
--- a/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py
+++ b/api/migrations/versions/6e2cfb077b04_add_dataset_collection_binding.py
@@ -9,6 +9,12 @@ import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import postgresql

+import models.types
+
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '6e2cfb077b04'
 down_revision = '77e83833755c'
@@ -18,19 +24,36 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('dataset_collection_bindings',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('provider_name', sa.String(length=40), nullable=False),
-    sa.Column('model_name', sa.String(length=40), nullable=False),
-    sa.Column('collection_name', sa.String(length=64), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        op.create_table('dataset_collection_bindings',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('provider_name', sa.String(length=40), nullable=False),
+            sa.Column('model_name', sa.String(length=40), nullable=False),
+            sa.Column('collection_name', sa.String(length=64), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
+        )
+    else:
+        op.create_table('dataset_collection_bindings',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('provider_name', sa.String(length=40), nullable=False),
+            sa.Column('model_name', sa.String(length=40), nullable=False),
+            sa.Column('collection_name', sa.String(length=64), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
+        )
+
     with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
         batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False)

-    with op.batch_alter_table('datasets', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
+    if _is_pg(conn):
+        with op.batch_alter_table('datasets', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
+    else:
+        with op.batch_alter_table('datasets', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))
     # ### end Alembic commands ###
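The recurring sa.Text() to models.types.LongText() swap matters because MySQL's TEXT type tops out at 64 KB, while the columns being migrated (prompts, serialized configs, retrieved content) can easily exceed that. A LongText type in the same spirit only needs to pick LONGTEXT when the dialect is MySQL; this is a sketch of one way to write it, not the file's verbatim code:

    from sqlalchemy import Text
    from sqlalchemy.dialects.mysql import LONGTEXT
    from sqlalchemy.types import TypeDecorator


    class LongText(TypeDecorator):
        impl = Text
        cache_ok = True

        def load_dialect_impl(self, dialect):
            # LONGTEXT (4 GB) on MySQL, ordinary unbounded TEXT elsewhere
            if dialect.name == "mysql":
                return dialect.type_descriptor(LONGTEXT())
            return dialect.type_descriptor(Text())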
diff --git a/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py b/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py
index 498b46e3c4..457338ef42 100644
--- a/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py
+++ b/api/migrations/versions/714aafe25d39_add_anntation_history_match_response.py
@@ -8,6 +8,12 @@ Create Date: 2023-12-14 06:38:02.972527
 import sqlalchemy as sa
 from alembic import op

+import models.types
+
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '714aafe25d39'
 down_revision = 'f2a6fc85e260'
@@ -17,9 +23,16 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
-        batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
+            batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
+    else:
+        with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
+            batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))
     # ### end Alembic commands ###
diff --git a/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py b/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py
index c5d8c3d88d..7bcd1a1be3 100644
--- a/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py
+++ b/api/migrations/versions/77e83833755c_add_app_config_retriever_resource.py
@@ -8,6 +8,12 @@ Create Date: 2023-09-06 17:26:40.311927
 import sqlalchemy as sa
 from alembic import op

+import models.types
+
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '77e83833755c'
 down_revision = '6dcb43972bdc'
@@ -17,8 +23,14 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
+    else:
+        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))
     # ### end Alembic commands ###
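Two PostgreSQL-isms get rewritten in every MySQL branch of this patch: the CURRENT_TIMESTAMP(0) default, which MySQL does not accept in this form, becomes sa.func.current_timestamp(), which SQLAlchemy renders natively per dialect; and explicit casts like 'custom'::character varying lose the cast. For a brand-new column the portable spellings would be, roughly (illustrative snippet, not taken from the patch):

    import sqlalchemy as sa

    # Rendered as the backend's own CURRENT_TIMESTAMP on both engines
    sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False)
    # A plain Python string is quoted for you, with no dialect-specific cast
    sa.Column('provider_type', sa.String(length=40), server_default='custom', nullable=False)

The patch keeps the ::character varying casts in the PostgreSQL branches so the DDL stays byte-identical to the historical migrations.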
diff --git a/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py b/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py
index 2ba0e13caa..f1932fe76c 100644
--- a/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py
+++ b/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py
@@ -10,6 +10,10 @@ from alembic import op

 import models.types

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '7b45942e39bb'
 down_revision = '4e99a8df00ff'
@@ -19,44 +23,75 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('data_source_api_key_auth_bindings',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('category', sa.String(length=255), nullable=False),
-    sa.Column('provider', sa.String(length=255), nullable=False),
-    sa.Column('credentials', sa.Text(), nullable=True),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
-    sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        # PostgreSQL: Keep original syntax
+        op.create_table('data_source_api_key_auth_bindings',
+            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('category', sa.String(length=255), nullable=False),
+            sa.Column('provider', sa.String(length=255), nullable=False),
+            sa.Column('credentials', sa.Text(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
+        )
+    else:
+        # MySQL: Use compatible syntax
+        op.create_table('data_source_api_key_auth_bindings',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('category', sa.String(length=255), nullable=False),
+            sa.Column('provider', sa.String(length=255), nullable=False),
+            sa.Column('credentials', models.types.LongText(), nullable=True),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
+            sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
+        )
+
     with op.batch_alter_table('data_source_api_key_auth_bindings', schema=None) as batch_op:
         batch_op.create_index('data_source_api_key_auth_binding_provider_idx', ['provider'], unique=False)
         batch_op.create_index('data_source_api_key_auth_binding_tenant_id_idx', ['tenant_id'], unique=False)

     with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
         batch_op.drop_index('source_binding_tenant_id_idx')
-        batch_op.drop_index('source_info_idx')
+        if _is_pg(conn):
+            batch_op.drop_index('source_info_idx', postgresql_using='gin')
+        else:
+            pass

     op.rename_table('data_source_bindings', 'data_source_oauth_bindings')

     with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
         batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)
-        batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
+        if _is_pg(conn):
+            batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
+        else:
+            pass

     # ### end Alembic commands ###
def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
+    conn = op.get_bind()
     with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
-        batch_op.drop_index('source_info_idx', postgresql_using='gin')
+        if _is_pg(conn):
+            batch_op.drop_index('source_info_idx', postgresql_using='gin')
+        else:
+            pass
         batch_op.drop_index('source_binding_tenant_id_idx')

     op.rename_table('data_source_oauth_bindings', 'data_source_bindings')

     with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
-        batch_op.create_index('source_info_idx', ['source_info'], unique=False)
+        if _is_pg(conn):
+            batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
+        else:
+            pass
         batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)

     with op.batch_alter_table('data_source_api_key_auth_bindings', schema=None) as batch_op:
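The source_info GIN index above is the one piece of DDL in this section with no MySQL analogue, so the patch skips it off PostgreSQL rather than try to emulate it. The guard reads the same in any migration that needs a dialect-only index; assuming conn = op.get_bind() has already run, the shape is:

    if _is_pg(conn):
        with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
            batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
    # On MySQL the JSON column simply stays unindexed; queries fall back to
    # scans (a generated-column index would be possible, but this patch does
    # not attempt it).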
diff --git a/api/migrations/versions/7bdef072e63a_add_workflow_tool.py b/api/migrations/versions/7bdef072e63a_add_workflow_tool.py
index f09a682f28..a0f4522cb3 100644
--- a/api/migrations/versions/7bdef072e63a_add_workflow_tool.py
+++ b/api/migrations/versions/7bdef072e63a_add_workflow_tool.py
@@ -10,6 +10,10 @@ from alembic import op

 import models.types

+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '7bdef072e63a'
 down_revision = '5fda94355fce'
@@ -19,21 +23,42 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('tool_workflow_providers',
-    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('name', sa.String(length=40), nullable=False),
-    sa.Column('icon', sa.String(length=255), nullable=False),
-    sa.Column('app_id', models.types.StringUUID(), nullable=False),
-    sa.Column('user_id', models.types.StringUUID(), nullable=False),
-    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=False),
-    sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
-    sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
-    sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
-    )
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        # PostgreSQL: Keep original syntax
+        op.create_table('tool_workflow_providers',
+            sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('name', sa.String(length=40), nullable=False),
+            sa.Column('icon', sa.String(length=255), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=False),
+            sa.Column('user_id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('description', sa.Text(), nullable=False),
+            sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
+            sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
+            sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
+        )
+    else:
+        # MySQL: Use compatible syntax
+        op.create_table('tool_workflow_providers',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('name', sa.String(length=40), nullable=False),
+            sa.Column('icon', sa.String(length=255), nullable=False),
+            sa.Column('app_id', models.types.StringUUID(), nullable=False),
+            sa.Column('user_id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('description', models.types.LongText(), nullable=False),
+            sa.Column('parameter_configuration', models.types.LongText(), default='[]', nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
+            sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
+            sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
+        )
     # ### end Alembic commands ###
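Note the quiet switch in the MySQL branch above: parameter_configuration uses default='[]' (a client-side value that SQLAlchemy supplies at INSERT time) instead of server_default='[]', because MySQL does not allow a literal DEFAULT on TEXT-family columns. The trade-off is that rows inserted outside SQLAlchemy will not receive the default. Side by side:

    # PostgreSQL: the default lives in the schema itself
    sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False)
    # MySQL: LONGTEXT cannot carry a literal DEFAULT, so the ORM supplies it
    sa.Column('parameter_configuration', models.types.LongText(), default='[]', nullable=False)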
diff --git a/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py b/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py
index 881ffec61d..3c0aa082d5 100644
--- a/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py
+++ b/api/migrations/versions/7ce5a52e4eee_add_tool_providers.py
@@ -9,6 +9,12 @@ import sqlalchemy as sa
 from alembic import op
 from sqlalchemy.dialects import postgresql

+import models.types
+
+
+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
 # revision identifiers, used by Alembic.
 revision = '7ce5a52e4eee'
 down_revision = '2beac44e5f5f'
@@ -18,19 +24,40 @@ depends_on = None

 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('tool_providers',
-    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-    sa.Column('tenant_id', postgresql.UUID(), nullable=False),
-    sa.Column('tool_name', sa.String(length=40), nullable=False),
-    sa.Column('encrypted_credentials', sa.Text(), nullable=True),
-    sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
-    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
-    sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
-    sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
-    )
-    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
+    conn = op.get_bind()
+
+    if _is_pg(conn):
+        # PostgreSQL: Keep original syntax
+        op.create_table('tool_providers',
+            sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+            sa.Column('tenant_id', postgresql.UUID(), nullable=False),
+            sa.Column('tool_name', sa.String(length=40), nullable=False),
+            sa.Column('encrypted_credentials', sa.Text(), nullable=True),
+            sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
+            sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
+        )
+    else:
+        # MySQL: Use compatible syntax
+        op.create_table('tool_providers',
+            sa.Column('id', models.types.StringUUID(), nullable=False),
+            sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+            sa.Column('tool_name', sa.String(length=40), nullable=False),
+            sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
+            sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
+            sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+            sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
+            sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
+        )
+    if _is_pg(conn):
+        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
+    else:
+        with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
+            batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))
     # ### end Alembic commands ###
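Dropping server_default=sa.text('uuid_generate_v4()') in every MySQL branch shifts UUID generation from the database to the application, so the ORM models have to supply ids themselves. A hedged sketch of what that can look like on a model (the model and column are illustrative, not quoted from api/models):

    import uuid

    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    from models.types import StringUUID  # the type this patch relies on


    class Base(DeclarativeBase):
        pass


    class ExampleRecord(Base):  # hypothetical model, not one of Dify's
        __tablename__ = "example_records"

        # A Python-side default replaces uuid_generate_v4(), so the same
        # model works on MySQL, where no server-side UUID default exists.
        id: Mapped[str] = mapped_column(StringUUID(), primary_key=True, default=lambda: str(uuid.uuid4()))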
revision = '7e6a8693e07a' down_revision = 'b2602e131636' @@ -19,14 +23,27 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('dataset_permissions', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('dataset_id', models.types.StringUUID(), nullable=False), - sa.Column('account_id', models.types.StringUUID(), nullable=False), - sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('dataset_permissions', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('dataset_id', models.types.StringUUID(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey') + ) + else: + op.create_table('dataset_permissions', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('dataset_id', models.types.StringUUID(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey') + ) + with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: batch_op.create_index('idx_dataset_permissions_account_id', ['account_id'], unique=False) batch_op.create_index('idx_dataset_permissions_dataset_id', ['dataset_id'], unique=False) diff --git a/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py b/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py index f7625bff8c..beea90b384 100644 --- a/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py +++ b/api/migrations/versions/88072f0caa04_add_custom_config_in_tenant.py @@ -8,6 +8,12 @@ Create Date: 2023-12-14 07:36:50.705362 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '88072f0caa04' down_revision = '246ba09cbbdb' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('tenants', schema=None) as batch_op: - batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tenants', schema=None) as batch_op: + batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('tenants', schema=None) as batch_op: + batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/89c7899ca936_.py b/api/migrations/versions/89c7899ca936_.py index 0fad39fa57..2420710e74 100644 --- a/api/migrations/versions/89c7899ca936_.py +++ b/api/migrations/versions/89c7899ca936_.py @@ -8,6 +8,12 @@ Create Date: 2024-01-21 04:10:23.192853 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '89c7899ca936' down_revision = '187385f442fc' @@ -17,21 +23,39 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('description', - existing_type=sa.VARCHAR(length=255), - type_=sa.Text(), - existing_nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=sa.VARCHAR(length=255), + type_=sa.Text(), + existing_nullable=True) + else: + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=sa.VARCHAR(length=255), + type_=models.types.LongText(), + existing_nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('sites', schema=None) as batch_op: - batch_op.alter_column('description', - existing_type=sa.Text(), - type_=sa.VARCHAR(length=255), - existing_nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=sa.Text(), + type_=sa.VARCHAR(length=255), + existing_nullable=True) + else: + with op.batch_alter_table('sites', schema=None) as batch_op: + batch_op.alter_column('description', + existing_type=models.types.LongText(), + type_=sa.VARCHAR(length=255), + existing_nullable=True) # ### end Alembic commands ### diff --git a/api/migrations/versions/8d2d099ceb74_add_qa_model_support.py b/api/migrations/versions/8d2d099ceb74_add_qa_model_support.py index 849103b071..14e9cde727 100644 --- a/api/migrations/versions/8d2d099ceb74_add_qa_model_support.py +++ b/api/migrations/versions/8d2d099ceb74_add_qa_model_support.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '8d2d099ceb74' down_revision = '7ce5a52e4eee' @@ -18,13 +24,24 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('document_segments', schema=None) as batch_op: - batch_op.add_column(sa.Column('answer', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('updated_by', postgresql.UUID(), nullable=True)) - batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('document_segments', schema=None) as batch_op: + batch_op.add_column(sa.Column('answer', sa.Text(), nullable=True)) + batch_op.add_column(sa.Column('updated_by', postgresql.UUID(), nullable=True)) + batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) - with op.batch_alter_table('documents', schema=None) as batch_op: - batch_op.add_column(sa.Column('doc_form', sa.String(length=255), server_default=sa.text("'text_model'::character varying"), nullable=False)) + with op.batch_alter_table('documents', schema=None) as batch_op: + batch_op.add_column(sa.Column('doc_form', sa.String(length=255), server_default=sa.text("'text_model'::character varying"), nullable=False)) + else: + with op.batch_alter_table('document_segments', schema=None) as batch_op: + batch_op.add_column(sa.Column('answer', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), nullable=True)) + batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False)) + + with op.batch_alter_table('documents', schema=None) as batch_op: + batch_op.add_column(sa.Column('doc_form', sa.String(length=255), server_default=sa.text("'text_model'"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/8e5588e6412e_add_environment_variable_to_workflow_.py b/api/migrations/versions/8e5588e6412e_add_environment_variable_to_workflow_.py index ec2336da4d..f550f79b8e 100644 --- a/api/migrations/versions/8e5588e6412e_add_environment_variable_to_workflow_.py +++ b/api/migrations/versions/8e5588e6412e_add_environment_variable_to_workflow_.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '8e5588e6412e' down_revision = '6e957a32015b' @@ -19,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('workflows', schema=None) as batch_op: - batch_op.add_column(sa.Column('environment_variables', sa.Text(), server_default='{}', nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('environment_variables', sa.Text(), server_default='{}', nullable=False)) + else: + with op.batch_alter_table('workflows', schema=None) as batch_op: + batch_op.add_column(sa.Column('environment_variables', models.types.LongText(), default='{}', nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py b/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py index 6cafc198aa..111e81240b 100644 --- a/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py +++ b/api/migrations/versions/8ec536f3c800_rename_api_provider_credentails.py @@ -8,6 +8,12 @@ Create Date: 2024-01-07 03:57:35.257545 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '8ec536f3c800' down_revision = 'ad472b61a054' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credentials_str', sa.Text(), nullable=False)) + else: + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credentials_str', models.types.LongText(), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py b/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py index 01d5631510..1c1c6cacbb 100644 --- a/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py +++ b/api/migrations/versions/8fe468ba0ca5_add_gpt4v_supports.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '8fe468ba0ca5' down_revision = 'a9836e3baeee' @@ -18,27 +24,52 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('message_files', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('message_id', postgresql.UUID(), nullable=False), - sa.Column('type', sa.String(length=255), nullable=False), - sa.Column('transfer_method', sa.String(length=255), nullable=False), - sa.Column('url', sa.Text(), nullable=True), - sa.Column('upload_file_id', postgresql.UUID(), nullable=True), - sa.Column('created_by_role', sa.String(length=255), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='message_file_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('message_files', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('message_id', postgresql.UUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('transfer_method', sa.String(length=255), nullable=False), + sa.Column('url', sa.Text(), nullable=True), + sa.Column('upload_file_id', postgresql.UUID(), nullable=True), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_file_pkey') + ) + else: + op.create_table('message_files', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('message_id', models.types.StringUUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('transfer_method', sa.String(length=255), nullable=False), + sa.Column('url', models.types.LongText(), nullable=True), + sa.Column('upload_file_id', models.types.StringUUID(), nullable=True), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='message_file_pkey') + ) + with op.batch_alter_table('message_files', schema=None) as batch_op: batch_op.create_index('message_file_created_by_idx', ['created_by'], unique=False) batch_op.create_index('message_file_message_idx', ['message_id'], unique=False) - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True)) + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('file_upload', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('file_upload', models.types.LongText(), nullable=True)) - with op.batch_alter_table('upload_files', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'account'::character varying"), nullable=False)) + if _is_pg(conn): + with op.batch_alter_table('upload_files', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'account'::character varying"), nullable=False)) + else: + with op.batch_alter_table('upload_files', schema=None) as batch_op: + 
batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'account'"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/968fff4c0ab9_add_api_based_extension.py b/api/migrations/versions/968fff4c0ab9_add_api_based_extension.py index 207a9c841f..c0ea28fe50 100644 --- a/api/migrations/versions/968fff4c0ab9_add_api_based_extension.py +++ b/api/migrations/versions/968fff4c0ab9_add_api_based_extension.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '968fff4c0ab9' down_revision = 'b3a09c049e8e' @@ -18,16 +24,28 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - - op.create_table('api_based_extensions', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('api_endpoint', sa.String(length=255), nullable=False), - sa.Column('api_key', sa.Text(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='api_based_extension_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('api_based_extensions', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('api_endpoint', sa.String(length=255), nullable=False), + sa.Column('api_key', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='api_based_extension_pkey') + ) + else: + op.create_table('api_based_extensions', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('api_endpoint', sa.String(length=255), nullable=False), + sa.Column('api_key', models.types.LongText(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='api_based_extension_pkey') + ) with op.batch_alter_table('api_based_extensions', schema=None) as batch_op: batch_op.create_index('api_based_extension_tenant_idx', ['tenant_id'], unique=False) diff --git a/api/migrations/versions/9f4e3427ea84_add_created_by_role.py b/api/migrations/versions/9f4e3427ea84_add_created_by_role.py index c7a98b4ac6..5d29d354f3 100644 --- a/api/migrations/versions/9f4e3427ea84_add_created_by_role.py +++ b/api/migrations/versions/9f4e3427ea84_add_created_by_role.py @@ -8,6 +8,10 @@ Create Date: 2023-05-17 17:29:01.060435 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = '9f4e3427ea84' down_revision = '64b051264f32' @@ -17,15 +21,30 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('pinned_conversations', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False)) - batch_op.drop_index('pinned_conversation_conversation_idx') - batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by_role', 'created_by'], unique=False) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('pinned_conversations', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False)) + batch_op.drop_index('pinned_conversation_conversation_idx') + batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by_role', 'created_by'], unique=False) - with op.batch_alter_table('saved_messages', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False)) - batch_op.drop_index('saved_message_message_idx') - batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False) + with op.batch_alter_table('saved_messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'::character varying"), nullable=False)) + batch_op.drop_index('saved_message_message_idx') + batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('pinned_conversations', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False)) + batch_op.drop_index('pinned_conversation_conversation_idx') + batch_op.create_index('pinned_conversation_conversation_idx', ['app_id', 'conversation_id', 'created_by_role', 'created_by'], unique=False) + + with op.batch_alter_table('saved_messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_by_role', sa.String(length=255), server_default=sa.text("'end_user'"), nullable=False)) + batch_op.drop_index('saved_message_message_idx') + batch_op.create_index('saved_message_message_idx', ['app_id', 'message_id', 'created_by_role', 'created_by'], unique=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/a45f4dfde53b_add_language_to_recommend_apps.py b/api/migrations/versions/a45f4dfde53b_add_language_to_recommend_apps.py index 3014978110..7e1e328317 100644 --- a/api/migrations/versions/a45f4dfde53b_add_language_to_recommend_apps.py +++ b/api/migrations/versions/a45f4dfde53b_add_language_to_recommend_apps.py @@ -8,6 +8,10 @@ Create Date: 2023-05-25 17:50:32.052335 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'a45f4dfde53b' down_revision = '9f4e3427ea84' @@ -17,10 +21,18 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('recommended_apps', schema=None) as batch_op: - batch_op.add_column(sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'::character varying"), nullable=False)) - batch_op.drop_index('recommended_app_is_listed_idx') - batch_op.create_index('recommended_app_is_listed_idx', ['is_listed', 'language'], unique=False) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('recommended_apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'::character varying"), nullable=False)) + batch_op.drop_index('recommended_app_is_listed_idx') + batch_op.create_index('recommended_app_is_listed_idx', ['is_listed', 'language'], unique=False) + else: + with op.batch_alter_table('recommended_apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'"), nullable=False)) + batch_op.drop_index('recommended_app_is_listed_idx') + batch_op.create_index('recommended_app_is_listed_idx', ['is_listed', 'language'], unique=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py b/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py index acb6812434..616cb2f163 100644 --- a/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py +++ b/api/migrations/versions/a5b56fb053ef_app_config_add_speech_to_text.py @@ -8,6 +8,12 @@ Create Date: 2023-07-06 17:55:20.894149 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'a5b56fb053ef' down_revision = 'd3d503a3471c' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('speech_to_text', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('speech_to_text', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/a8d7385a7b66_add_embeddings_provider_name.py b/api/migrations/versions/a8d7385a7b66_add_embeddings_provider_name.py index 1ee01381d8..77311061b0 100644 --- a/api/migrations/versions/a8d7385a7b66_add_embeddings_provider_name.py +++ b/api/migrations/versions/a8d7385a7b66_add_embeddings_provider_name.py @@ -8,6 +8,10 @@ Create Date: 2024-04-02 12:17:22.641525 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'a8d7385a7b66' down_revision = '17b5ab037c40' @@ -17,10 +21,18 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('embeddings', schema=None) as batch_op: - batch_op.add_column(sa.Column('provider_name', sa.String(length=40), server_default=sa.text("''::character varying"), nullable=False)) - batch_op.drop_constraint('embedding_hash_idx', type_='unique') - batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash', 'provider_name']) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.add_column(sa.Column('provider_name', sa.String(length=40), server_default=sa.text("''::character varying"), nullable=False)) + batch_op.drop_constraint('embedding_hash_idx', type_='unique') + batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash', 'provider_name']) + else: + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.add_column(sa.Column('provider_name', sa.String(length=40), server_default=sa.text("''"), nullable=False)) + batch_op.drop_constraint('embedding_hash_idx', type_='unique') + batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash', 'provider_name']) # ### end Alembic commands ### diff --git a/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py b/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py index 5dcb630aed..900ff78036 100644 --- a/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py +++ b/api/migrations/versions/a9836e3baeee_add_external_data_tools_in_app_model_.py @@ -8,6 +8,12 @@ Create Date: 2023-11-02 04:04:57.609485 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'a9836e3baeee' down_revision = '968fff4c0ab9' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('external_data_tools', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('external_data_tools', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/b24be59fbb04_.py b/api/migrations/versions/b24be59fbb04_.py index 29ba859f2b..b0a6d10d8c 100644 --- a/api/migrations/versions/b24be59fbb04_.py +++ b/api/migrations/versions/b24be59fbb04_.py @@ -8,6 +8,12 @@ Create Date: 2024-01-17 01:31:12.670556 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'b24be59fbb04' down_revision = 'de95f5c77138' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('text_to_speech', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('text_to_speech', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/b289e2408ee2_add_workflow.py b/api/migrations/versions/b289e2408ee2_add_workflow.py index 966f86c05f..ea50930eed 100644 --- a/api/migrations/versions/b289e2408ee2_add_workflow.py +++ b/api/migrations/versions/b289e2408ee2_add_workflow.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'b289e2408ee2' down_revision = 'a8d7385a7b66' @@ -18,98 +24,190 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('workflow_app_logs', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('workflow_id', postgresql.UUID(), nullable=False), - sa.Column('workflow_run_id', postgresql.UUID(), nullable=False), - sa.Column('created_from', sa.String(length=255), nullable=False), - sa.Column('created_by_role', sa.String(length=255), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='workflow_app_log_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('workflow_app_logs', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('workflow_id', postgresql.UUID(), nullable=False), + sa.Column('workflow_run_id', postgresql.UUID(), nullable=False), + sa.Column('created_from', sa.String(length=255), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='workflow_app_log_pkey') + ) + else: + op.create_table('workflow_app_logs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=False), + sa.Column('workflow_run_id', models.types.StringUUID(), nullable=False), + sa.Column('created_from', sa.String(length=255), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', 
name='workflow_app_log_pkey') + ) with op.batch_alter_table('workflow_app_logs', schema=None) as batch_op: batch_op.create_index('workflow_app_log_app_idx', ['tenant_id', 'app_id'], unique=False) - op.create_table('workflow_node_executions', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('workflow_id', postgresql.UUID(), nullable=False), - sa.Column('triggered_from', sa.String(length=255), nullable=False), - sa.Column('workflow_run_id', postgresql.UUID(), nullable=True), - sa.Column('index', sa.Integer(), nullable=False), - sa.Column('predecessor_node_id', sa.String(length=255), nullable=True), - sa.Column('node_id', sa.String(length=255), nullable=False), - sa.Column('node_type', sa.String(length=255), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('inputs', sa.Text(), nullable=True), - sa.Column('process_data', sa.Text(), nullable=True), - sa.Column('outputs', sa.Text(), nullable=True), - sa.Column('status', sa.String(length=255), nullable=False), - sa.Column('error', sa.Text(), nullable=True), - sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False), - sa.Column('execution_metadata', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('created_by_role', sa.String(length=255), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('finished_at', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey') - ) + if _is_pg(conn): + op.create_table('workflow_node_executions', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('workflow_id', postgresql.UUID(), nullable=False), + sa.Column('triggered_from', sa.String(length=255), nullable=False), + sa.Column('workflow_run_id', postgresql.UUID(), nullable=True), + sa.Column('index', sa.Integer(), nullable=False), + sa.Column('predecessor_node_id', sa.String(length=255), nullable=True), + sa.Column('node_id', sa.String(length=255), nullable=False), + sa.Column('node_type', sa.String(length=255), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('inputs', sa.Text(), nullable=True), + sa.Column('process_data', sa.Text(), nullable=True), + sa.Column('outputs', sa.Text(), nullable=True), + sa.Column('status', sa.String(length=255), nullable=False), + sa.Column('error', sa.Text(), nullable=True), + sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('execution_metadata', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey') + ) + else: + op.create_table('workflow_node_executions', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', 
models.types.StringUUID(), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=False), + sa.Column('triggered_from', sa.String(length=255), nullable=False), + sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True), + sa.Column('index', sa.Integer(), nullable=False), + sa.Column('predecessor_node_id', sa.String(length=255), nullable=True), + sa.Column('node_id', sa.String(length=255), nullable=False), + sa.Column('node_type', sa.String(length=255), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('inputs', models.types.LongText(), nullable=True), + sa.Column('process_data', models.types.LongText(), nullable=True), + sa.Column('outputs', models.types.LongText(), nullable=True), + sa.Column('status', sa.String(length=255), nullable=False), + sa.Column('error', models.types.LongText(), nullable=True), + sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('execution_metadata', models.types.LongText(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey') + ) with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op: batch_op.create_index('workflow_node_execution_node_run_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'node_id'], unique=False) batch_op.create_index('workflow_node_execution_workflow_run_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'workflow_run_id'], unique=False) - op.create_table('workflow_runs', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('sequence_number', sa.Integer(), nullable=False), - sa.Column('workflow_id', postgresql.UUID(), nullable=False), - sa.Column('type', sa.String(length=255), nullable=False), - sa.Column('triggered_from', sa.String(length=255), nullable=False), - sa.Column('version', sa.String(length=255), nullable=False), - sa.Column('graph', sa.Text(), nullable=True), - sa.Column('inputs', sa.Text(), nullable=True), - sa.Column('status', sa.String(length=255), nullable=False), - sa.Column('outputs', sa.Text(), nullable=True), - sa.Column('error', sa.Text(), nullable=True), - sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False), - sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), - sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True), - sa.Column('created_by_role', sa.String(length=255), nullable=False), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('finished_at', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id', name='workflow_run_pkey') - ) + if _is_pg(conn): + op.create_table('workflow_runs', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + 
sa.Column('sequence_number', sa.Integer(), nullable=False), + sa.Column('workflow_id', postgresql.UUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('triggered_from', sa.String(length=255), nullable=False), + sa.Column('version', sa.String(length=255), nullable=False), + sa.Column('graph', sa.Text(), nullable=True), + sa.Column('inputs', sa.Text(), nullable=True), + sa.Column('status', sa.String(length=255), nullable=False), + sa.Column('outputs', sa.Text(), nullable=True), + sa.Column('error', sa.Text(), nullable=True), + sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_run_pkey') + ) + else: + op.create_table('workflow_runs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('sequence_number', sa.Integer(), nullable=False), + sa.Column('workflow_id', models.types.StringUUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('triggered_from', sa.String(length=255), nullable=False), + sa.Column('version', sa.String(length=255), nullable=False), + sa.Column('graph', models.types.LongText(), nullable=True), + sa.Column('inputs', models.types.LongText(), nullable=True), + sa.Column('status', sa.String(length=255), nullable=False), + sa.Column('outputs', models.types.LongText(), nullable=True), + sa.Column('error', models.types.LongText(), nullable=True), + sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True), + sa.Column('created_by_role', sa.String(length=255), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('finished_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_run_pkey') + ) with op.batch_alter_table('workflow_runs', schema=None) as batch_op: batch_op.create_index('workflow_run_triggerd_from_idx', ['tenant_id', 'app_id', 'triggered_from'], unique=False) - op.create_table('workflows', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('type', sa.String(length=255), nullable=False), - sa.Column('version', sa.String(length=255), nullable=False), - sa.Column('graph', sa.Text(), nullable=True), - sa.Column('features', sa.Text(), nullable=True), - sa.Column('created_by', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_by', 
postgresql.UUID(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('id', name='workflow_pkey') - ) + if _is_pg(conn): + op.create_table('workflows', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('version', sa.String(length=255), nullable=False), + sa.Column('graph', sa.Text(), nullable=True), + sa.Column('features', sa.Text(), nullable=True), + sa.Column('created_by', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_by', postgresql.UUID(), nullable=True), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_pkey') + ) + else: + op.create_table('workflows', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('version', sa.String(length=255), nullable=False), + sa.Column('graph', models.types.LongText(), nullable=True), + sa.Column('features', models.types.LongText(), nullable=True), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name='workflow_pkey') + ) + with op.batch_alter_table('workflows', schema=None) as batch_op: batch_op.create_index('workflow_version_idx', ['tenant_id', 'app_id', 'version'], unique=False) - with op.batch_alter_table('apps', schema=None) as batch_op: - batch_op.add_column(sa.Column('workflow_id', postgresql.UUID(), nullable=True)) + if _is_pg(conn): + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('workflow_id', postgresql.UUID(), nullable=True)) - with op.batch_alter_table('messages', schema=None) as batch_op: - batch_op.add_column(sa.Column('workflow_run_id', postgresql.UUID(), nullable=True)) + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('workflow_run_id', postgresql.UUID(), nullable=True)) + else: + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('workflow_id', models.types.StringUUID(), nullable=True)) + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py b/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py index 5682eff030..772395c25b 100644 --- a/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py +++ b/api/migrations/versions/b3a09c049e8e_add_advanced_prompt_templates.py @@ -8,6 +8,12 @@ Create Date: 2023-10-10 15:23:23.395420 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. 
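The server-default rewrites in the MySQL branches follow a single rule: PostgreSQL-only spellings such as CURRENT_TIMESTAMP(0) and literals cast with ::character varying are replaced by spellings both backends accept. A short sketch of the mapping, assuming SQLAlchemy's standard dialects; the 'end_user' literal appears in the migrations above, while the created_at column is illustrative:

    import sqlalchemy as sa

    # PostgreSQL-only spellings kept in the _is_pg branches:
    pg_timestamp = sa.text('CURRENT_TIMESTAMP(0)')
    pg_literal = sa.text("'end_user'::character varying")

    # Portable spellings used in the MySQL branches:
    portable_timestamp = sa.func.current_timestamp()  # renders as CURRENT_TIMESTAMP
    portable_literal = sa.text("'end_user'")          # plain SQL string literal

    # An illustrative column declared with the portable default:
    created_at = sa.Column('created_at', sa.DateTime(),
                           server_default=portable_timestamp, nullable=False)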
revision = 'b3a09c049e8e' down_revision = '2e9819ca5b28' @@ -17,11 +23,20 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple')) - batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple')) + batch_op.add_column(sa.Column('chat_prompt_config', sa.Text(), nullable=True)) + batch_op.add_column(sa.Column('completion_prompt_config', sa.Text(), nullable=True)) + batch_op.add_column(sa.Column('dataset_configs', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('prompt_type', sa.String(length=255), nullable=False, server_default='simple')) + batch_op.add_column(sa.Column('chat_prompt_config', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('completion_prompt_config', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('dataset_configs', models.types.LongText(), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/bf0aec5ba2cf_add_provider_order.py b/api/migrations/versions/bf0aec5ba2cf_add_provider_order.py index dfa1517462..32736f41ca 100644 --- a/api/migrations/versions/bf0aec5ba2cf_add_provider_order.py +++ b/api/migrations/versions/bf0aec5ba2cf_add_provider_order.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'bf0aec5ba2cf' down_revision = 'e35ed59becda' @@ -18,25 +24,48 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('provider_orders', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('provider_name', sa.String(length=40), nullable=False), - sa.Column('account_id', postgresql.UUID(), nullable=False), - sa.Column('payment_product_id', sa.String(length=191), nullable=False), - sa.Column('payment_id', sa.String(length=191), nullable=True), - sa.Column('transaction_id', sa.String(length=191), nullable=True), - sa.Column('quantity', sa.Integer(), server_default=sa.text('1'), nullable=False), - sa.Column('currency', sa.String(length=40), nullable=True), - sa.Column('total_amount', sa.Integer(), nullable=True), - sa.Column('payment_status', sa.String(length=40), server_default=sa.text("'wait_pay'::character varying"), nullable=False), - sa.Column('paid_at', sa.DateTime(), nullable=True), - sa.Column('pay_failed_at', sa.DateTime(), nullable=True), - sa.Column('refunded_at', sa.DateTime(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='provider_order_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('provider_orders', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('account_id', postgresql.UUID(), nullable=False), + sa.Column('payment_product_id', sa.String(length=191), nullable=False), + sa.Column('payment_id', sa.String(length=191), nullable=True), + sa.Column('transaction_id', sa.String(length=191), nullable=True), + sa.Column('quantity', sa.Integer(), server_default=sa.text('1'), nullable=False), + sa.Column('currency', sa.String(length=40), nullable=True), + sa.Column('total_amount', sa.Integer(), nullable=True), + sa.Column('payment_status', sa.String(length=40), server_default=sa.text("'wait_pay'::character varying"), nullable=False), + sa.Column('paid_at', sa.DateTime(), nullable=True), + sa.Column('pay_failed_at', sa.DateTime(), nullable=True), + sa.Column('refunded_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_order_pkey') + ) + else: + op.create_table('provider_orders', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=40), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('payment_product_id', sa.String(length=191), nullable=False), + sa.Column('payment_id', sa.String(length=191), nullable=True), + sa.Column('transaction_id', sa.String(length=191), nullable=True), + sa.Column('quantity', sa.Integer(), server_default=sa.text('1'), nullable=False), + sa.Column('currency', sa.String(length=40), nullable=True), + sa.Column('total_amount', sa.Integer(), nullable=True), + sa.Column('payment_status', sa.String(length=40), server_default=sa.text("'wait_pay'"), nullable=False), + sa.Column('paid_at', sa.DateTime(), 
nullable=True), + sa.Column('pay_failed_at', sa.DateTime(), nullable=True), + sa.Column('refunded_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_order_pkey') + ) with op.batch_alter_table('provider_orders', schema=None) as batch_op: batch_op.create_index('provider_order_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False) diff --git a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py index f87819c367..76be794ff4 100644 --- a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py +++ b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py @@ -11,6 +11,10 @@ from sqlalchemy.dialects import postgresql import models.types + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'c031d46af369' down_revision = '04c602f5dc9b' @@ -20,16 +24,30 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('trace_app_config', - sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', models.types.StringUUID(), nullable=False), - sa.Column('tracing_provider', sa.String(length=255), nullable=True), - sa.Column('tracing_config', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), - sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False), - sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('trace_app_config', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('tracing_provider', sa.String(length=255), nullable=True), + sa.Column('tracing_config', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') + ) + else: + op.create_table('trace_app_config', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('tracing_provider', sa.String(length=255), nullable=True), + sa.Column('tracing_config', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), nullable=False), + sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False), + sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') + ) with op.batch_alter_table('trace_app_config', schema=None) as batch_op: batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False) diff --git 
a/api/migrations/versions/c3311b089690_add_tool_meta.py b/api/migrations/versions/c3311b089690_add_tool_meta.py index e075535b0d..79f80f5553 100644 --- a/api/migrations/versions/c3311b089690_add_tool_meta.py +++ b/api/migrations/versions/c3311b089690_add_tool_meta.py @@ -8,6 +8,12 @@ Create Date: 2024-03-28 11:50:45.364875 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'c3311b089690' down_revision = 'e2eacc9a1b63' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: - batch_op.add_column(sa.Column('tool_meta_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_meta_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False)) + else: + with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op: + batch_op.add_column(sa.Column('tool_meta_str', models.types.LongText(), default=sa.text("'{}'"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/c71211c8f604_add_tool_invoke_model_log.py b/api/migrations/versions/c71211c8f604_add_tool_invoke_model_log.py index 95fb8f5d0e..e3e818d2a7 100644 --- a/api/migrations/versions/c71211c8f604_add_tool_invoke_model_log.py +++ b/api/migrations/versions/c71211c8f604_add_tool_invoke_model_log.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'c71211c8f604' down_revision = 'f25003750af4' @@ -18,28 +24,54 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('tool_model_invokes', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('user_id', postgresql.UUID(), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('provider', sa.String(length=40), nullable=False), - sa.Column('tool_type', sa.String(length=40), nullable=False), - sa.Column('tool_name', sa.String(length=40), nullable=False), - sa.Column('tool_id', postgresql.UUID(), nullable=False), - sa.Column('model_parameters', sa.Text(), nullable=False), - sa.Column('prompt_messages', sa.Text(), nullable=False), - sa.Column('model_response', sa.Text(), nullable=False), - sa.Column('prompt_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), - sa.Column('answer_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), - sa.Column('answer_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), - sa.Column('answer_price_unit', sa.Numeric(precision=10, scale=7), server_default=sa.text('0.001'), nullable=False), - sa.Column('provider_response_latency', sa.Float(), server_default=sa.text('0'), nullable=False), - sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True), - sa.Column('currency', sa.String(length=255), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='tool_model_invoke_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('tool_model_invokes', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('user_id', postgresql.UUID(), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('provider', sa.String(length=40), nullable=False), + sa.Column('tool_type', sa.String(length=40), nullable=False), + sa.Column('tool_name', sa.String(length=40), nullable=False), + sa.Column('tool_id', postgresql.UUID(), nullable=False), + sa.Column('model_parameters', sa.Text(), nullable=False), + sa.Column('prompt_messages', sa.Text(), nullable=False), + sa.Column('model_response', sa.Text(), nullable=False), + sa.Column('prompt_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('answer_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('answer_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), + sa.Column('answer_price_unit', sa.Numeric(precision=10, scale=7), server_default=sa.text('0.001'), nullable=False), + sa.Column('provider_response_latency', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True), + sa.Column('currency', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_model_invoke_pkey') + ) + else: + op.create_table('tool_model_invokes', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider', sa.String(length=40), nullable=False), + 
sa.Column('tool_type', sa.String(length=40), nullable=False), + sa.Column('tool_name', sa.String(length=40), nullable=False), + sa.Column('tool_id', models.types.StringUUID(), nullable=False), + sa.Column('model_parameters', models.types.LongText(), nullable=False), + sa.Column('prompt_messages', models.types.LongText(), nullable=False), + sa.Column('model_response', models.types.LongText(), nullable=False), + sa.Column('prompt_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('answer_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False), + sa.Column('answer_unit_price', sa.Numeric(precision=10, scale=4), nullable=False), + sa.Column('answer_price_unit', sa.Numeric(precision=10, scale=7), server_default=sa.text('0.001'), nullable=False), + sa.Column('provider_response_latency', sa.Float(), server_default=sa.text('0'), nullable=False), + sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True), + sa.Column('currency', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='tool_model_invoke_pkey') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/cc04d0998d4d_set_model_config_column_nullable.py b/api/migrations/versions/cc04d0998d4d_set_model_config_column_nullable.py index aefbe43f14..2b9f0e90a4 100644 --- a/api/migrations/versions/cc04d0998d4d_set_model_config_column_nullable.py +++ b/api/migrations/versions/cc04d0998d4d_set_model_config_column_nullable.py @@ -9,6 +9,10 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'cc04d0998d4d' down_revision = 'b289e2408ee2' @@ -18,16 +22,30 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.alter_column('provider', - existing_type=sa.VARCHAR(length=255), - nullable=True) - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=True) - batch_op.alter_column('configs', - existing_type=postgresql.JSON(astext_type=sa.Text()), - nullable=True) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.alter_column('provider', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('configs', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=True) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.alter_column('provider', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=True) + batch_op.alter_column('configs', + existing_type=sa.JSON(), + nullable=True) with op.batch_alter_table('apps', schema=None) as batch_op: batch_op.alter_column('api_rpm', @@ -45,6 +63,8 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### + conn = op.get_bind() + with op.batch_alter_table('apps', schema=None) as batch_op: batch_op.alter_column('api_rpm', existing_type=sa.Integer(), @@ -56,15 +76,27 @@ def downgrade(): server_default=None, nullable=False) - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.alter_column('configs', - existing_type=postgresql.JSON(astext_type=sa.Text()), - nullable=False) - batch_op.alter_column('model_id', - existing_type=sa.VARCHAR(length=255), - nullable=False) - batch_op.alter_column('provider', - existing_type=sa.VARCHAR(length=255), - nullable=False) + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.alter_column('configs', + existing_type=postgresql.JSON(astext_type=sa.Text()), + nullable=False) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('provider', + existing_type=sa.VARCHAR(length=255), + nullable=False) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.alter_column('configs', + existing_type=sa.JSON(), + nullable=False) + batch_op.alter_column('model_id', + existing_type=sa.VARCHAR(length=255), + nullable=False) + batch_op.alter_column('provider', + existing_type=sa.VARCHAR(length=255), + nullable=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/e1901f623fd0_add_annotation_reply.py b/api/migrations/versions/e1901f623fd0_add_annotation_reply.py index 32902c8eb0..9e02ec5d84 100644 --- a/api/migrations/versions/e1901f623fd0_add_annotation_reply.py +++ b/api/migrations/versions/e1901f623fd0_add_annotation_reply.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'e1901f623fd0' down_revision = 'fca025d3b60f' @@ -18,51 +24,98 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
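Each revision touched by this patch re-declares the same _is_pg helper instead of importing it, keeping every migration file self-contained so that old revisions keep running unmodified against databases created at any point in history. A minimal sketch of the dialect-gating pattern (table and column names are hypothetical, not from the PR):

    import sqlalchemy as sa
    from alembic import op


    def _is_pg(conn) -> bool:
        # The bound connection knows its backend: "postgresql", "mysql", ...
        return conn.dialect.name == "postgresql"


    def upgrade():
        conn = op.get_bind()
        if _is_pg(conn):
            # PostgreSQL tolerates the (redundant) typed literal.
            default = sa.text("'active'::character varying")
        else:
            # MySQL has no '::' cast operator; the bare literal works
            # there, and PostgreSQL would accept it too.
            default = sa.text("'active'")
        with op.batch_alter_table("example_table", schema=None) as batch_op:
            batch_op.add_column(sa.Column("status", sa.String(length=16), server_default=default, nullable=False))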
### - op.create_table('app_annotation_hit_histories', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('annotation_id', postgresql.UUID(), nullable=False), - sa.Column('source', sa.Text(), nullable=False), - sa.Column('question', sa.Text(), nullable=False), - sa.Column('account_id', postgresql.UUID(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.PrimaryKeyConstraint('id', name='app_annotation_hit_histories_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('app_annotation_hit_histories', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('annotation_id', postgresql.UUID(), nullable=False), + sa.Column('source', sa.Text(), nullable=False), + sa.Column('question', sa.Text(), nullable=False), + sa.Column('account_id', postgresql.UUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_annotation_hit_histories_pkey') + ) + else: + op.create_table('app_annotation_hit_histories', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('annotation_id', models.types.StringUUID(), nullable=False), + sa.Column('source', models.types.LongText(), nullable=False), + sa.Column('question', models.types.LongText(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.PrimaryKeyConstraint('id', name='app_annotation_hit_histories_pkey') + ) + with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: batch_op.create_index('app_annotation_hit_histories_account_idx', ['account_id'], unique=False) batch_op.create_index('app_annotation_hit_histories_annotation_idx', ['annotation_id'], unique=False) batch_op.create_index('app_annotation_hit_histories_app_idx', ['app_id'], unique=False) - with op.batch_alter_table('app_model_configs', schema=None) as batch_op: - batch_op.add_column(sa.Column('annotation_reply', sa.Text(), nullable=True)) + if _is_pg(conn): + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_reply', sa.Text(), nullable=True)) + else: + with op.batch_alter_table('app_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), nullable=True)) - with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: - batch_op.add_column(sa.Column('type', sa.String(length=40), server_default=sa.text("'dataset'::character varying"), nullable=False)) + if _is_pg(conn): + with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: + batch_op.add_column(sa.Column('type', sa.String(length=40), server_default=sa.text("'dataset'::character varying"), nullable=False)) + else: + with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: + batch_op.add_column(sa.Column('type', sa.String(length=40), server_default=sa.text("'dataset'"), nullable=False)) - with op.batch_alter_table('message_annotations', schema=None) as batch_op: - 
batch_op.add_column(sa.Column('question', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False)) - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=True) - batch_op.alter_column('message_id', - existing_type=postgresql.UUID(), - nullable=True) + if _is_pg(conn): + with op.batch_alter_table('message_annotations', schema=None) as batch_op: + batch_op.add_column(sa.Column('question', sa.Text(), nullable=True)) + batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False)) + batch_op.alter_column('conversation_id', + existing_type=postgresql.UUID(), + nullable=True) + batch_op.alter_column('message_id', + existing_type=postgresql.UUID(), + nullable=True) + else: + with op.batch_alter_table('message_annotations', schema=None) as batch_op: + batch_op.add_column(sa.Column('question', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('hit_count', sa.Integer(), server_default=sa.text('0'), nullable=False)) + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=True) + batch_op.alter_column('message_id', + existing_type=models.types.StringUUID(), + nullable=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('message_annotations', schema=None) as batch_op: - batch_op.alter_column('message_id', - existing_type=postgresql.UUID(), - nullable=False) - batch_op.alter_column('conversation_id', - existing_type=postgresql.UUID(), - nullable=False) - batch_op.drop_column('hit_count') - batch_op.drop_column('question') + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('message_annotations', schema=None) as batch_op: + batch_op.alter_column('message_id', + existing_type=postgresql.UUID(), + nullable=False) + batch_op.alter_column('conversation_id', + existing_type=postgresql.UUID(), + nullable=False) + batch_op.drop_column('hit_count') + batch_op.drop_column('question') + else: + with op.batch_alter_table('message_annotations', schema=None) as batch_op: + batch_op.alter_column('message_id', + existing_type=models.types.StringUUID(), + nullable=False) + batch_op.alter_column('conversation_id', + existing_type=models.types.StringUUID(), + nullable=False) + batch_op.drop_column('hit_count') + batch_op.drop_column('question') with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: batch_op.drop_column('type') diff --git a/api/migrations/versions/e2eacc9a1b63_add_status_for_message.py b/api/migrations/versions/e2eacc9a1b63_add_status_for_message.py index 08f994a41f..0eeb68360e 100644 --- a/api/migrations/versions/e2eacc9a1b63_add_status_for_message.py +++ b/api/migrations/versions/e2eacc9a1b63_add_status_for_message.py @@ -8,6 +8,12 @@ Create Date: 2024-03-21 09:31:27.342221 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'e2eacc9a1b63' down_revision = '563cf8bf777b' @@ -17,14 +23,23 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### + conn = op.get_bind() + with op.batch_alter_table('conversations', schema=None) as batch_op: batch_op.add_column(sa.Column('invoke_from', sa.String(length=255), nullable=True)) - with op.batch_alter_table('messages', schema=None) as batch_op: - batch_op.add_column(sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False)) - batch_op.add_column(sa.Column('error', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('message_metadata', sa.Text(), nullable=True)) - batch_op.add_column(sa.Column('invoke_from', sa.String(length=255), nullable=True)) + if _is_pg(conn): + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False)) + batch_op.add_column(sa.Column('error', sa.Text(), nullable=True)) + batch_op.add_column(sa.Column('message_metadata', sa.Text(), nullable=True)) + batch_op.add_column(sa.Column('invoke_from', sa.String(length=255), nullable=True)) + else: + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False)) + batch_op.add_column(sa.Column('error', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('message_metadata', models.types.LongText(), nullable=True)) + batch_op.add_column(sa.Column('invoke_from', sa.String(length=255), nullable=True)) # ### end Alembic commands ### diff --git a/api/migrations/versions/e32f6ccb87c6_e08af0a69ccefbb59fa80c778efee300bb780980.py b/api/migrations/versions/e32f6ccb87c6_e08af0a69ccefbb59fa80c778efee300bb780980.py index 3d7dd1fabf..c52605667b 100644 --- a/api/migrations/versions/e32f6ccb87c6_e08af0a69ccefbb59fa80c778efee300bb780980.py +++ b/api/migrations/versions/e32f6ccb87c6_e08af0a69ccefbb59fa80c778efee300bb780980.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'e32f6ccb87c6' down_revision = '614f77cecc48' @@ -18,28 +24,52 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('data_source_bindings', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', postgresql.UUID(), nullable=False), - sa.Column('access_token', sa.String(length=255), nullable=False), - sa.Column('provider', sa.String(length=255), nullable=False), - sa.Column('source_info', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), - sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True), - sa.PrimaryKeyConstraint('id', name='source_binding_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table('data_source_bindings', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', postgresql.UUID(), nullable=False), + sa.Column('access_token', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('source_info', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True), + sa.PrimaryKeyConstraint('id', name='source_binding_pkey') + ) + else: + op.create_table('data_source_bindings', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('access_token', sa.String(length=255), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('source_info', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False), + sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True), + sa.PrimaryKeyConstraint('id', name='source_binding_pkey') + ) + with op.batch_alter_table('data_source_bindings', schema=None) as batch_op: batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False) - batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin') + if _is_pg(conn): + batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin') + else: + pass # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
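The GIN index on source_info is created only under the PostgreSQL branch: GIN is a PostgreSQL-specific access method for JSONB, and MySQL 8 cannot index a JSON column directly (it requires an indexed generated column instead), so the MySQL path deliberately creates nothing. Dialect-prefixed keyword arguments such as postgresql_using are silently ignored by other dialects, which is why the gating has to be explicit; an illustrative snippet, not taken from the PR:

    import sqlalchemy as sa

    metadata = sa.MetaData()
    bindings = sa.Table(
        "data_source_bindings",
        metadata,
        sa.Column("source_info", sa.JSON),
    )
    # Renders as CREATE INDEX ... USING gin on PostgreSQL; on MySQL the
    # kwarg is dropped, and the resulting plain index on a JSON column
    # would be rejected by the server anyway.
    idx = sa.Index("source_info_idx", bindings.c.source_info, postgresql_using="gin")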
### + conn = op.get_bind() + with op.batch_alter_table('data_source_bindings', schema=None) as batch_op: - batch_op.drop_index('source_info_idx', postgresql_using='gin') + if _is_pg(conn): + batch_op.drop_index('source_info_idx', postgresql_using='gin') + else: + pass batch_op.drop_index('source_binding_tenant_id_idx') op.drop_table('data_source_bindings') diff --git a/api/migrations/versions/e8883b0148c9_add_dataset_model_name.py b/api/migrations/versions/e8883b0148c9_add_dataset_model_name.py index 875683d68e..b7bb0dd4df 100644 --- a/api/migrations/versions/e8883b0148c9_add_dataset_model_name.py +++ b/api/migrations/versions/e8883b0148c9_add_dataset_model_name.py @@ -8,6 +8,10 @@ Create Date: 2023-08-15 20:54:58.936787 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'e8883b0148c9' down_revision = '2c8af9671032' @@ -17,9 +21,18 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.add_column(sa.Column('embedding_model', sa.String(length=255), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False)) - batch_op.add_column(sa.Column('embedding_model_provider', sa.String(length=255), server_default=sa.text("'openai'::character varying"), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('embedding_model', sa.String(length=255), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False)) + batch_op.add_column(sa.Column('embedding_model_provider', sa.String(length=255), server_default=sa.text("'openai'::character varying"), nullable=False)) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('embedding_model', sa.String(length=255), server_default=sa.text("'text-embedding-ada-002'"), nullable=False)) + batch_op.add_column(sa.Column('embedding_model_provider', sa.String(length=255), server_default=sa.text("'openai'"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/eeb2e349e6ac_increase_max_model_name_length.py b/api/migrations/versions/eeb2e349e6ac_increase_max_model_name_length.py index 434531b6c8..6125744a1f 100644 --- a/api/migrations/versions/eeb2e349e6ac_increase_max_model_name_length.py +++ b/api/migrations/versions/eeb2e349e6ac_increase_max_model_name_length.py @@ -10,6 +10,10 @@ from alembic import op import models as models + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'eeb2e349e6ac' down_revision = '53bf8af60645' @@ -19,30 +23,50 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### + conn = op.get_bind() + with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: batch_op.alter_column('model_name', existing_type=sa.VARCHAR(length=40), type_=sa.String(length=255), existing_nullable=False) - with op.batch_alter_table('embeddings', schema=None) as batch_op: - batch_op.alter_column('model_name', - existing_type=sa.VARCHAR(length=40), - type_=sa.String(length=255), - existing_nullable=False, - existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + if _is_pg(conn): + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('model_name', + existing_type=sa.VARCHAR(length=40), + type_=sa.String(length=255), + existing_nullable=False, + existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + else: + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('model_name', + existing_type=sa.VARCHAR(length=40), + type_=sa.String(length=255), + existing_nullable=False, + existing_server_default=sa.text("'text-embedding-ada-002'")) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('embeddings', schema=None) as batch_op: - batch_op.alter_column('model_name', - existing_type=sa.String(length=255), - type_=sa.VARCHAR(length=40), - existing_nullable=False, - existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('model_name', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=40), + existing_nullable=False, + existing_server_default=sa.text("'text-embedding-ada-002'::character varying")) + else: + with op.batch_alter_table('embeddings', schema=None) as batch_op: + batch_op.alter_column('model_name', + existing_type=sa.String(length=255), + type_=sa.VARCHAR(length=40), + existing_nullable=False, + existing_server_default=sa.text("'text-embedding-ada-002'")) with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op: batch_op.alter_column('model_name', diff --git a/api/migrations/versions/f25003750af4_add_created_updated_at.py b/api/migrations/versions/f25003750af4_add_created_updated_at.py index 178eaf2380..f2752dfbb7 100644 --- a/api/migrations/versions/f25003750af4_add_created_updated_at.py +++ b/api/migrations/versions/f25003750af4_add_created_updated_at.py @@ -8,6 +8,10 @@ Create Date: 2024-01-07 04:53:24.441861 import sqlalchemy as sa from alembic import op + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'f25003750af4' down_revision = '00bacef91f18' @@ -17,9 +21,18 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: - batch_op.add_column(sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) - batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + # PostgreSQL: Keep original syntax + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) + batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False)) + else: + # MySQL: Use compatible syntax + with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False)) + batch_op.add_column(sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py b/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py index dc9392a92c..02098e91c1 100644 --- a/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py +++ b/api/migrations/versions/f2a6fc85e260_add_anntation_history_message_id.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'f2a6fc85e260' down_revision = '46976cc39132' @@ -18,9 +24,16 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: - batch_op.add_column(sa.Column('message_id', postgresql.UUID(), nullable=False)) - batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: + batch_op.add_column(sa.Column('message_id', postgresql.UUID(), nullable=False)) + batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False) + else: + with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op: + batch_op.add_column(sa.Column('message_id', models.types.StringUUID(), nullable=False)) + batch_op.create_index('app_annotation_hit_histories_message_idx', ['message_id'], unique=False) # ### end Alembic commands ### diff --git a/api/migrations/versions/f9107f83abab_add_desc_for_apps.py b/api/migrations/versions/f9107f83abab_add_desc_for_apps.py index 3e5ae0d67d..8a3f479217 100644 --- a/api/migrations/versions/f9107f83abab_add_desc_for_apps.py +++ b/api/migrations/versions/f9107f83abab_add_desc_for_apps.py @@ -8,6 +8,12 @@ Create Date: 2024-02-28 08:16:14.090481 import sqlalchemy as sa from alembic import op +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'f9107f83abab' down_revision = 'cc04d0998d4d' @@ -17,8 +23,14 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
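The timestamp defaults follow the same recipe throughout the patch: sa.text('CURRENT_TIMESTAMP(0)') pins a fractional-seconds precision that MySQL refuses unless the DATETIME column declares the matching precision, whereas sa.func.current_timestamp() renders as plain CURRENT_TIMESTAMP, which both backends accept. A minimal sketch (the column definition is hypothetical):

    import sqlalchemy as sa

    created_at = sa.Column(
        "created_at",
        sa.DateTime(),
        # Renders as DEFAULT CURRENT_TIMESTAMP on PostgreSQL and MySQL alike.
        server_default=sa.func.current_timestamp(),
        nullable=False,
    )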
### - with op.batch_alter_table('apps', schema=None) as batch_op: - batch_op.add_column(sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False)) + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False)) + else: + with op.batch_alter_table('apps', schema=None) as batch_op: + batch_op.add_column(sa.Column('description', models.types.LongText(), default=sa.text("''"), nullable=False)) # ### end Alembic commands ### diff --git a/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py b/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py index 52495be60a..4a13133c1c 100644 --- a/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py +++ b/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'fca025d3b60f' down_revision = '8fe468ba0ca5' @@ -18,26 +24,48 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### + conn = op.get_bind() + op.drop_table('sessions') - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.add_column(sa.Column('retrieval_model', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) - batch_op.create_index('retrieval_model_idx', ['retrieval_model'], unique=False, postgresql_using='gin') + if _is_pg(conn): + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('retrieval_model', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) + batch_op.create_index('retrieval_model_idx', ['retrieval_model'], unique=False, postgresql_using='gin') + else: + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('retrieval_model', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - with op.batch_alter_table('datasets', schema=None) as batch_op: - batch_op.drop_index('retrieval_model_idx', postgresql_using='gin') - batch_op.drop_column('retrieval_model') + conn = op.get_bind() + + if _is_pg(conn): + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.drop_index('retrieval_model_idx', postgresql_using='gin') + batch_op.drop_column('retrieval_model') + else: + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.drop_column('retrieval_model') - op.create_table('sessions', - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('session_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('data', postgresql.BYTEA(), autoincrement=False, nullable=True), - sa.Column('expiry', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', name='sessions_pkey'), - sa.UniqueConstraint('session_id', name='sessions_session_id_key') - ) + if _is_pg(conn): + op.create_table('sessions', + sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column('session_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('data', postgresql.BYTEA(), autoincrement=False, nullable=True), + sa.Column('expiry', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='sessions_pkey'), + sa.UniqueConstraint('session_id', name='sessions_session_id_key') + ) + else: + op.create_table('sessions', + sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column('session_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True), + sa.Column('data', models.types.BinaryData(), autoincrement=False, nullable=True), + sa.Column('expiry', sa.TIMESTAMP(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='sessions_pkey'), + sa.UniqueConstraint('session_id', name='sessions_session_id_key') + ) # ### end Alembic commands ### diff --git a/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table.py b/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table.py index 6f76a361d9..ab84ec0d87 100644 --- a/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table.py +++ b/api/migrations/versions/fecff1c3da27_remove_extra_tracing_app_config_table.py @@ -9,6 +9,12 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql +import models.types + + +def _is_pg(conn): + return conn.dialect.name == "postgresql" + # revision identifiers, used by Alembic. revision = 'fecff1c3da27' down_revision = '408176b91ad3' @@ -29,20 +35,38 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! 
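In the sessions table above, postgresql.BYTEA is swapped for models.types.BinaryData because SQLAlchemy's generic LargeBinary maps to MySQL's BLOB, which caps at 64 KiB, too small for payloads such as pickled embedding vectors. The real BinaryData type lives in api/models/types.py and is not shown in this excerpt; one plausible shape for such a type, offered purely as a hypothetical sketch:

    import sqlalchemy as sa
    from sqlalchemy.dialects.mysql import LONGBLOB


    class BinaryData(sa.types.TypeDecorator):
        impl = sa.LargeBinary
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "mysql":
                # BLOB tops out at 64 KiB; LONGBLOB stores up to 4 GiB.
                return dialect.type_descriptor(LONGBLOB())
            # PostgreSQL: LargeBinary already renders as BYTEA.
            return dialect.type_descriptor(sa.LargeBinary())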
### - op.create_table( - 'tracing_app_configs', - sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', postgresql.UUID(), nullable=False), - sa.Column('tracing_provider', sa.String(length=255), nullable=True), - sa.Column('tracing_config', postgresql.JSON(astext_type=sa.Text()), nullable=True), - sa.Column( - 'created_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False - ), - sa.Column( - 'updated_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False - ), - sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') - ) + conn = op.get_bind() + + if _is_pg(conn): + op.create_table( + 'tracing_app_configs', + sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', postgresql.UUID(), nullable=False), + sa.Column('tracing_provider', sa.String(length=255), nullable=True), + sa.Column('tracing_config', postgresql.JSON(astext_type=sa.Text()), nullable=True), + sa.Column( + 'created_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False + ), + sa.Column( + 'updated_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False + ), + sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') + ) + else: + op.create_table( + 'tracing_app_configs', + sa.Column('id', models.types.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('tracing_provider', sa.String(length=255), nullable=True), + sa.Column('tracing_config', sa.JSON(), nullable=True), + sa.Column( + 'created_at', sa.TIMESTAMP(), server_default=sa.func.now(), autoincrement=False, nullable=False + ), + sa.Column( + 'updated_at', sa.TIMESTAMP(), server_default=sa.func.now(), autoincrement=False, nullable=False + ), + sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') + ) with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: batch_op.drop_index('idx_dataset_permissions_tenant_id') diff --git a/api/models/account.py b/api/models/account.py index dc3f2094fd..615883f01a 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -3,6 +3,7 @@ import json from dataclasses import field from datetime import datetime from typing import Any, Optional +from uuid import uuid4 import sqlalchemy as sa from flask_login import UserMixin @@ -13,7 +14,7 @@ from typing_extensions import deprecated from models.base import TypeBase from .engine import db -from .types import StringUUID +from .types import LongText, StringUUID class TenantAccountRole(enum.StrEnum): @@ -88,7 +89,7 @@ class Account(UserMixin, TypeBase): __tablename__ = "accounts" __table_args__ = (sa.PrimaryKeyConstraint("id", name="account_pkey"), sa.Index("account_email_idx", "email")) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) name: Mapped[str] = mapped_column(String(255)) email: Mapped[str] = mapped_column(String(255)) password: Mapped[str | None] = mapped_column(String(255), default=None) @@ -102,9 +103,7 @@ class Account(UserMixin, TypeBase): last_active_at: Mapped[datetime] = mapped_column( DateTime, server_default=func.current_timestamp(), nullable=False, init=False ) - status: Mapped[str] = mapped_column( - String(16), 
server_default=sa.text("'active'::character varying"), default="active" - ) + status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'active'"), default="active") initialized_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) created_at: Mapped[datetime] = mapped_column( DateTime, server_default=func.current_timestamp(), nullable=False, init=False @@ -237,16 +236,12 @@ class Tenant(TypeBase): __tablename__ = "tenants" __table_args__ = (sa.PrimaryKeyConstraint("id", name="tenant_pkey"),) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) name: Mapped[str] = mapped_column(String(255)) - encrypt_public_key: Mapped[str | None] = mapped_column(sa.Text, default=None) - plan: Mapped[str] = mapped_column( - String(255), server_default=sa.text("'basic'::character varying"), default="basic" - ) - status: Mapped[str] = mapped_column( - String(255), server_default=sa.text("'normal'::character varying"), default="normal" - ) - custom_config: Mapped[str | None] = mapped_column(sa.Text, default=None) + encrypt_public_key: Mapped[str | None] = mapped_column(LongText, default=None) + plan: Mapped[str] = mapped_column(String(255), server_default=sa.text("'basic'"), default="basic") + status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'"), default="normal") + custom_config: Mapped[str | None] = mapped_column(LongText, default=None) created_at: Mapped[datetime] = mapped_column( DateTime, server_default=func.current_timestamp(), nullable=False, init=False ) @@ -281,7 +276,7 @@ class TenantAccountJoin(TypeBase): sa.UniqueConstraint("tenant_id", "account_id", name="unique_tenant_account_join"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID) account_id: Mapped[str] = mapped_column(StringUUID) current: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false"), default=False) @@ -303,7 +298,7 @@ class AccountIntegrate(TypeBase): sa.UniqueConstraint("provider", "open_id", name="unique_provider_open_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) account_id: Mapped[str] = mapped_column(StringUUID) provider: Mapped[str] = mapped_column(String(16)) open_id: Mapped[str] = mapped_column(String(255)) @@ -327,15 +322,13 @@ class InvitationCode(TypeBase): id: Mapped[int] = mapped_column(sa.Integer, init=False) batch: Mapped[str] = mapped_column(String(255)) code: Mapped[str] = mapped_column(String(32)) - status: Mapped[str] = mapped_column( - String(16), server_default=sa.text("'unused'::character varying"), default="unused" - ) + status: Mapped[str] = mapped_column(String(16), server_default=sa.text("'unused'"), default="unused") used_at: Mapped[datetime | None] = mapped_column(DateTime, default=None) used_by_tenant_id: Mapped[str | None] = mapped_column(StringUUID, default=None) used_by_account_id: Mapped[str | None] = mapped_column(StringUUID, default=None) deprecated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) created_at: Mapped[datetime] = mapped_column( - DateTime, 
server_default=sa.text("CURRENT_TIMESTAMP(0)"), nullable=False, init=False + DateTime, server_default=sa.func.current_timestamp(), nullable=False, init=False ) @@ -356,7 +349,7 @@ class TenantPluginPermission(TypeBase): sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) install_permission: Mapped[InstallPermission] = mapped_column( String(16), nullable=False, server_default="everyone", default=InstallPermission.EVERYONE @@ -383,7 +376,7 @@ class TenantPluginAutoUpgradeStrategy(TypeBase): sa.UniqueConstraint("tenant_id", name="unique_tenant_plugin_auto_upgrade_strategy"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) strategy_setting: Mapped[StrategySetting] = mapped_column( String(16), nullable=False, server_default="fix_only", default=StrategySetting.FIX_ONLY @@ -391,8 +384,8 @@ class TenantPluginAutoUpgradeStrategy(TypeBase): upgrade_mode: Mapped[UpgradeMode] = mapped_column( String(16), nullable=False, server_default="exclude", default=UpgradeMode.EXCLUDE ) - exclude_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False, default_factory=list) - include_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False, default_factory=list) + exclude_plugins: Mapped[list[str]] = mapped_column(sa.JSON, nullable=False, default_factory=list) + include_plugins: Mapped[list[str]] = mapped_column(sa.JSON, nullable=False, default_factory=list) upgrade_time_of_day: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), init=False diff --git a/api/models/api_based_extension.py b/api/models/api_based_extension.py index e86826fc3d..fcb819a9c7 100644 --- a/api/models/api_based_extension.py +++ b/api/models/api_based_extension.py @@ -1,12 +1,13 @@ import enum from datetime import datetime +from uuid import uuid4 import sqlalchemy as sa -from sqlalchemy import DateTime, String, Text, func +from sqlalchemy import DateTime, String, func from sqlalchemy.orm import Mapped, mapped_column from .base import Base -from .types import StringUUID +from .types import LongText, StringUUID class APIBasedExtensionPoint(enum.StrEnum): @@ -23,9 +24,9 @@ class APIBasedExtension(Base): sa.Index("api_based_extension_tenant_idx", "tenant_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) api_endpoint: Mapped[str] = mapped_column(String(255), nullable=False) - api_key = mapped_column(Text, nullable=False) + api_key = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/base.py b/api/models/base.py index 3660068035..78a1fdc2b7 100644 --- a/api/models/base.py +++ b/api/models/base.py @@ -1,6 +1,6 @@ from datetime import datetime -from sqlalchemy 
import DateTime, func, text +from sqlalchemy import DateTime, func from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column from libs.datetime_utils import naive_utc_now @@ -25,12 +25,11 @@ class DefaultFieldsMixin: id: Mapped[str] = mapped_column( StringUUID, primary_key=True, - # NOTE: The default and server_default serve as fallback mechanisms. + # NOTE: The default serves as a fallback mechanism. # The application can generate the `id` before saving to optimize # the insertion process (especially for interdependent models) # and reduce database roundtrips. - default=uuidv7, - server_default=text("uuidv7()"), + default=lambda: str(uuidv7()), ) created_at: Mapped[datetime] = mapped_column( diff --git a/api/models/dataset.py b/api/models/dataset.py index 4470d11355..6cff6b530c 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -11,16 +11,17 @@ import time from datetime import datetime from json import JSONDecodeError from typing import Any, cast +from uuid import uuid4 import sqlalchemy as sa from sqlalchemy import DateTime, String, func, select -from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, Session, mapped_column from configs import dify_config from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_storage import storage +from libs.uuid_utils import uuidv7 from models.base import TypeBase from services.entities.knowledge_entities.knowledge_entities import ParentMode, Rule @@ -28,7 +29,7 @@ from .account import Account from .base import Base from .engine import db from .model import App, Tag, TagBinding, UploadFile -from .types import StringUUID +from .types import AdjustedJSON, BinaryData, LongText, StringUUID, adjusted_json_index logger = logging.getLogger(__name__) @@ -44,21 +45,21 @@ class Dataset(Base): __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_pkey"), sa.Index("dataset_tenant_idx", "tenant_id"), - sa.Index("retrieval_model_idx", "retrieval_model", postgresql_using="gin"), + adjusted_json_index("retrieval_model_idx", "retrieval_model"), ) INDEXING_TECHNIQUE_LIST = ["high_quality", "economy", None] PROVIDER_LIST = ["vendor", "external", None] - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID) name: Mapped[str] = mapped_column(String(255)) - description = mapped_column(sa.Text, nullable=True) - provider: Mapped[str] = mapped_column(String(255), server_default=sa.text("'vendor'::character varying")) - permission: Mapped[str] = mapped_column(String(255), server_default=sa.text("'only_me'::character varying")) + description = mapped_column(LongText, nullable=True) + provider: Mapped[str] = mapped_column(String(255), server_default=sa.text("'vendor'")) + permission: Mapped[str] = mapped_column(String(255), server_default=sa.text("'only_me'")) data_source_type = mapped_column(String(255)) indexing_technique: Mapped[str | None] = mapped_column(String(255)) - index_struct = mapped_column(sa.Text, nullable=True) + index_struct = mapped_column(LongText, nullable=True) created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) @@ -69,10 +70,10 @@ class
Dataset(Base): embedding_model_provider = mapped_column(sa.String(255), nullable=True) keyword_number = mapped_column(sa.Integer, nullable=True, server_default=sa.text("10")) collection_binding_id = mapped_column(StringUUID, nullable=True) - retrieval_model = mapped_column(JSONB, nullable=True) + retrieval_model = mapped_column(AdjustedJSON, nullable=True) built_in_field_enabled = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - icon_info = mapped_column(JSONB, nullable=True) - runtime_mode = mapped_column(sa.String(255), nullable=True, server_default=sa.text("'general'::character varying")) + icon_info = mapped_column(AdjustedJSON, nullable=True) + runtime_mode = mapped_column(sa.String(255), nullable=True, server_default=sa.text("'general'")) pipeline_id = mapped_column(StringUUID, nullable=True) chunk_structure = mapped_column(sa.String(255), nullable=True) enable_api = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) @@ -307,10 +308,10 @@ class DatasetProcessRule(Base): sa.Index("dataset_process_rule_dataset_id_idx", "dataset_id"), ) - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4())) dataset_id = mapped_column(StringUUID, nullable=False) - mode = mapped_column(String(255), nullable=False, server_default=sa.text("'automatic'::character varying")) - rules = mapped_column(sa.Text, nullable=True) + mode = mapped_column(String(255), nullable=False, server_default=sa.text("'automatic'")) + rules = mapped_column(LongText, nullable=True) created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) @@ -347,16 +348,16 @@ class Document(Base): sa.Index("document_dataset_id_idx", "dataset_id"), sa.Index("document_is_paused_idx", "is_paused"), sa.Index("document_tenant_idx", "tenant_id"), - sa.Index("document_metadata_idx", "doc_metadata", postgresql_using="gin"), + adjusted_json_index("document_metadata_idx", "doc_metadata"), ) # initial fields - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) position: Mapped[int] = mapped_column(sa.Integer, nullable=False) data_source_type: Mapped[str] = mapped_column(String(255), nullable=False) - data_source_info = mapped_column(sa.Text, nullable=True) + data_source_info = mapped_column(LongText, nullable=True) dataset_process_rule_id = mapped_column(StringUUID, nullable=True) batch: Mapped[str] = mapped_column(String(255), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) @@ -369,7 +370,7 @@ class Document(Base): processing_started_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) # parsing - file_id = mapped_column(sa.Text, nullable=True) + file_id = mapped_column(LongText, nullable=True) word_count: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) # TODO: make this not nullable parsing_completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) @@ -390,11 +391,11 @@ class Document(Base): paused_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) # error - error = mapped_column(sa.Text, nullable=True) + error = mapped_column(LongText, nullable=True) 
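Across these models, server_default=sa.text("uuid_generate_v4()"), which depends on PostgreSQL's uuid-ossp extension, gives way to a Python-side default=lambda: str(uuid4()). The ORM now generates the id before the INSERT is emitted, so behavior is identical on PostgreSQL and MySQL; the trade-off is that rows inserted through raw SQL no longer receive an id automatically. An illustrative model, not from the PR:

    from uuid import uuid4

    import sqlalchemy as sa
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        pass


    class Example(Base):  # hypothetical model
        __tablename__ = "examples"

        # Generated client-side at INSERT time; no database extension or
        # server-side function is needed on either backend.
        id: Mapped[str] = mapped_column(sa.String(36), primary_key=True, default=lambda: str(uuid4()))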
stopped_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) # basic fields - indexing_status = mapped_column(String(255), nullable=False, server_default=sa.text("'waiting'::character varying")) + indexing_status = mapped_column(String(255), nullable=False, server_default=sa.text("'waiting'")) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) disabled_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) disabled_by = mapped_column(StringUUID, nullable=True) @@ -406,8 +407,8 @@ class Document(Base): DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() ) doc_type = mapped_column(String(40), nullable=True) - doc_metadata = mapped_column(JSONB, nullable=True) - doc_form = mapped_column(String(255), nullable=False, server_default=sa.text("'text_model'::character varying")) + doc_metadata = mapped_column(AdjustedJSON, nullable=True) + doc_form = mapped_column(String(255), nullable=False, server_default=sa.text("'text_model'")) doc_language = mapped_column(String(255), nullable=True) DATA_SOURCES = ["upload_file", "notion_import", "website_crawl"] @@ -697,13 +698,13 @@ class DocumentSegment(Base): ) # initial fields - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) document_id = mapped_column(StringUUID, nullable=False) position: Mapped[int] - content = mapped_column(sa.Text, nullable=False) - answer = mapped_column(sa.Text, nullable=True) + content = mapped_column(LongText, nullable=False) + answer = mapped_column(LongText, nullable=True) word_count: Mapped[int] tokens: Mapped[int] @@ -717,7 +718,7 @@ class DocumentSegment(Base): enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) disabled_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) disabled_by = mapped_column(StringUUID, nullable=True) - status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'waiting'::character varying")) + status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'waiting'")) created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) @@ -726,7 +727,7 @@ class DocumentSegment(Base): ) indexing_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - error = mapped_column(sa.Text, nullable=True) + error = mapped_column(LongText, nullable=True) stopped_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) @property @@ -870,29 +871,27 @@ class ChildChunk(Base): ) # initial fields - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) document_id = mapped_column(StringUUID, nullable=False) segment_id = mapped_column(StringUUID, nullable=False) position: Mapped[int] = mapped_column(sa.Integer, nullable=False) - content = mapped_column(sa.Text, nullable=False) + content = 
mapped_column(LongText, nullable=False) word_count: Mapped[int] = mapped_column(sa.Integer, nullable=False) # indexing fields index_node_id = mapped_column(String(255), nullable=True) index_node_hash = mapped_column(String(255), nullable=True) - type = mapped_column(String(255), nullable=False, server_default=sa.text("'automatic'::character varying")) + type = mapped_column(String(255), nullable=False, server_default=sa.text("'automatic'")) created_by = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") - ) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) updated_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), onupdate=func.current_timestamp() + DateTime, nullable=False, server_default=sa.func.current_timestamp(), onupdate=func.current_timestamp() ) indexing_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - error = mapped_column(sa.Text, nullable=True) + error = mapped_column(LongText, nullable=True) @property def dataset(self): @@ -915,7 +914,7 @@ class AppDatasetJoin(TypeBase): ) id: Mapped[str] = mapped_column( - StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()"), init=False + StringUUID, primary_key=True, nullable=False, default=lambda: str(uuid4()), init=False ) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -935,12 +934,12 @@ class DatasetQuery(Base): sa.Index("dataset_query_dataset_id_idx", "dataset_id"), ) - id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, primary_key=True, nullable=False, default=lambda: str(uuid4())) dataset_id = mapped_column(StringUUID, nullable=False) - content = mapped_column(sa.Text, nullable=False) + content = mapped_column(LongText, nullable=False) source: Mapped[str] = mapped_column(String(255), nullable=False) source_app_id = mapped_column(StringUUID, nullable=True) - created_by_role = mapped_column(String, nullable=False) + created_by_role = mapped_column(String(255), nullable=False) created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) @@ -952,13 +951,11 @@ class DatasetKeywordTable(TypeBase): sa.Index("dataset_keyword_table_dataset_id_idx", "dataset_id"), ) - id: Mapped[str] = mapped_column( - StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()"), init=False - ) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4()), init=False) dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False, unique=True) - keyword_table: Mapped[str] = mapped_column(sa.Text, nullable=False) + keyword_table: Mapped[str] = mapped_column(LongText, nullable=False) data_source_type: Mapped[str] = mapped_column( - String(255), nullable=False, server_default=sa.text("'database'::character varying"), default="database" + String(255), nullable=False, server_default=sa.text("'database'"), default="database" ) @property @@ -1005,14 +1002,12 @@ class Embedding(Base): 
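The wholesale replacement of sa.Text with models.types.LongText in these models reflects a size limit: MySQL's TEXT stores at most 65,535 bytes, while PostgreSQL's TEXT is effectively unbounded, so long rules, serialized configs, and chunk contents need LONGTEXT on MySQL. The actual LongText definition sits in api/models/types.py, outside this excerpt; a hypothetical sketch of how such a cross-dialect type can be written:

    import sqlalchemy as sa
    from sqlalchemy.dialects.mysql import LONGTEXT


    class LongText(sa.types.TypeDecorator):
        impl = sa.Text
        cache_ok = True

        def load_dialect_impl(self, dialect):
            if dialect.name == "mysql":
                # LONGTEXT holds up to 4 GiB, approximating PostgreSQL's
                # unbounded TEXT.
                return dialect.type_descriptor(LONGTEXT())
            return dialect.type_descriptor(sa.Text())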
sa.Index("created_at_idx", "created_at"), ) - id = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()")) - model_name = mapped_column( - String(255), nullable=False, server_default=sa.text("'text-embedding-ada-002'::character varying") - ) + id = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) + model_name = mapped_column(String(255), nullable=False, server_default=sa.text("'text-embedding-ada-002'")) hash = mapped_column(String(64), nullable=False) - embedding = mapped_column(sa.LargeBinary, nullable=False) + embedding = mapped_column(BinaryData, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - provider_name = mapped_column(String(255), nullable=False, server_default=sa.text("''::character varying")) + provider_name = mapped_column(String(255), nullable=False, server_default=sa.text("''")) def set_embedding(self, embedding_data: list[float]): self.embedding = pickle.dumps(embedding_data, protocol=pickle.HIGHEST_PROTOCOL) @@ -1028,10 +1023,10 @@ class DatasetCollectionBinding(Base): sa.Index("provider_model_name_idx", "provider_name", "model_name"), ) - id = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) - type = mapped_column(String(40), server_default=sa.text("'dataset'::character varying"), nullable=False) + type = mapped_column(String(40), server_default=sa.text("'dataset'"), nullable=False) collection_name = mapped_column(String(64), nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) @@ -1045,12 +1040,12 @@ class TidbAuthBinding(Base): sa.Index("tidb_auth_bindings_created_at_idx", "created_at"), sa.Index("tidb_auth_bindings_status_idx", "status"), ) - id = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=True) cluster_id: Mapped[str] = mapped_column(String(255), nullable=False) cluster_name: Mapped[str] = mapped_column(String(255), nullable=False) active: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - status = mapped_column(String(255), nullable=False, server_default=sa.text("'CREATING'::character varying")) + status = mapped_column(sa.String(255), nullable=False, server_default=sa.text("'CREATING'")) account: Mapped[str] = mapped_column(String(255), nullable=False) password: Mapped[str] = mapped_column(String(255), nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) @@ -1062,9 +1057,7 @@ class Whitelist(TypeBase): sa.PrimaryKeyConstraint("id", name="whitelists_pkey"), sa.Index("whitelists_tenant_idx", "tenant_id"), ) - id: Mapped[str] = mapped_column( - StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()"), init=False - ) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) category: Mapped[str] = mapped_column(String(255), nullable=False) created_at: Mapped[datetime] = 
mapped_column( @@ -1081,9 +1074,7 @@ class DatasetPermission(TypeBase): sa.Index("idx_dataset_permissions_tenant_id", "tenant_id"), ) - id: Mapped[str] = mapped_column( - StringUUID, server_default=sa.text("uuid_generate_v4()"), primary_key=True, init=False - ) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), primary_key=True, init=False) dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False) account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -1103,13 +1094,11 @@ class ExternalKnowledgeApis(TypeBase): sa.Index("external_knowledge_apis_name_idx", "name"), ) - id: Mapped[str] = mapped_column( - StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()"), init=False - ) + id: Mapped[str] = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4()), init=False) name: Mapped[str] = mapped_column(String(255), nullable=False) description: Mapped[str] = mapped_column(String(255), nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - settings: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + settings: Mapped[str | None] = mapped_column(LongText, nullable=True) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), init=False @@ -1162,11 +1151,11 @@ class ExternalKnowledgeBindings(Base): sa.Index("external_knowledge_bindings_external_knowledge_api_idx", "external_knowledge_api_id"), ) - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, nullable=False, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) external_knowledge_api_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) - external_knowledge_id = mapped_column(sa.Text, nullable=False) + external_knowledge_id = mapped_column(String(512), nullable=False) created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) @@ -1184,14 +1173,12 @@ class DatasetAutoDisableLog(Base): sa.Index("dataset_auto_disable_log_created_atx", "created_at"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) document_id = mapped_column(StringUUID, nullable=False) notified: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - created_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") - ) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) class RateLimitLog(TypeBase): @@ -1202,12 +1189,12 @@ class RateLimitLog(TypeBase): sa.Index("rate_limit_log_operation_idx", "operation"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) subscription_plan: Mapped[str] = 
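# Pattern behind every uuid_generate_v4() removal in this file: that function
# comes from PostgreSQL's uuid-ossp extension, and MySQL only accepts
# function defaults from 8.0.13 onward (with its own parenthesised syntax),
# so primary keys are now minted in the application. A minimal self-contained
# illustration; the class and table names here are demo-only:
import uuid

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class _DemoBase(DeclarativeBase):
    pass


class _Demo(_DemoBase):
    __tablename__ = "_demo"
    # default= runs in Python for every ORM INSERT on any backend; the
    # trade-off is that rows inserted via raw SQL no longer receive an id
    # from the database itself.
    id: Mapped[str] = mapped_column(
        sa.String(36), primary_key=True, default=lambda: str(uuid.uuid4())
    )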
mapped_column(String(255), nullable=False) operation: Mapped[str] = mapped_column(String(255), nullable=False) created_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) @@ -1219,16 +1206,14 @@ class DatasetMetadata(Base): sa.Index("dataset_metadata_dataset_idx", "dataset_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(String(255), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) - created_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)") - ) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), onupdate=func.current_timestamp() + DateTime, nullable=False, server_default=sa.func.current_timestamp(), onupdate=func.current_timestamp() ) created_by = mapped_column(StringUUID, nullable=False) updated_by = mapped_column(StringUUID, nullable=True) @@ -1244,7 +1229,7 @@ class DatasetMetadataBinding(Base): sa.Index("dataset_metadata_binding_document_idx", "document_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) metadata_id = mapped_column(StringUUID, nullable=False) @@ -1257,12 +1242,12 @@ class PipelineBuiltInTemplate(Base): # type: ignore[name-defined] __tablename__ = "pipeline_built_in_templates" __table_args__ = (sa.PrimaryKeyConstraint("id", name="pipeline_built_in_template_pkey"),) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) name = mapped_column(sa.String(255), nullable=False) - description = mapped_column(sa.Text, nullable=False) + description = mapped_column(LongText, nullable=False) chunk_structure = mapped_column(sa.String(255), nullable=False) icon = mapped_column(sa.JSON, nullable=False) - yaml_content = mapped_column(sa.Text, nullable=False) + yaml_content = mapped_column(LongText, nullable=False) copyright = mapped_column(sa.String(255), nullable=False) privacy_policy = mapped_column(sa.String(255), nullable=False) position = mapped_column(sa.Integer, nullable=False) @@ -1281,14 +1266,14 @@ class PipelineCustomizedTemplate(Base): # type: ignore[name-defined] sa.Index("pipeline_customized_template_tenant_idx", "tenant_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id = mapped_column(StringUUID, nullable=False) name = mapped_column(sa.String(255), nullable=False) - description = mapped_column(sa.Text, nullable=False) + description = mapped_column(LongText, nullable=False) chunk_structure = mapped_column(sa.String(255), nullable=False) icon = mapped_column(sa.JSON, nullable=False) position = mapped_column(sa.Integer, nullable=False) - yaml_content = mapped_column(sa.Text, nullable=False) + yaml_content = 
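# The pipeline tables drop a database-side uuidv7() default that MySQL does
# not provide and call the project's Python helper instead. Assuming
# libs.uuid_utils.uuidv7 follows RFC 9562, the high 48 bits are a millisecond
# timestamp, so new ids sort after old ones and keep B-tree primary keys
# append-friendly:
from libs.uuid_utils import uuidv7

first, second = str(uuidv7()), str(uuidv7())
print(first < second)  # True once the timestamp (or tie-breaker bits) advance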
mapped_column(LongText, nullable=False) install_count = mapped_column(sa.Integer, nullable=False, default=0) language = mapped_column(sa.String(255), nullable=False) created_by = mapped_column(StringUUID, nullable=False) @@ -1310,10 +1295,10 @@ class Pipeline(Base): # type: ignore[name-defined] __tablename__ = "pipelines" __table_args__ = (sa.PrimaryKeyConstraint("id", name="pipeline_pkey"),) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) name = mapped_column(sa.String(255), nullable=False) - description = mapped_column(sa.Text, nullable=False, server_default=sa.text("''::character varying")) + description = mapped_column(LongText, nullable=False, default=sa.text("''")) workflow_id = mapped_column(StringUUID, nullable=True) is_public = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) is_published = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) @@ -1335,11 +1320,11 @@ class DocumentPipelineExecutionLog(Base): sa.Index("document_pipeline_execution_logs_document_id_idx", "document_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) pipeline_id = mapped_column(StringUUID, nullable=False) document_id = mapped_column(StringUUID, nullable=False) datasource_type = mapped_column(sa.String(255), nullable=False) - datasource_info = mapped_column(sa.Text, nullable=False) + datasource_info = mapped_column(LongText, nullable=False) datasource_node_id = mapped_column(sa.String(255), nullable=False) input_data = mapped_column(sa.JSON, nullable=False) created_by = mapped_column(StringUUID, nullable=True) @@ -1350,9 +1335,9 @@ class PipelineRecommendedPlugin(Base): __tablename__ = "pipeline_recommended_plugins" __table_args__ = (sa.PrimaryKeyConstraint("id", name="pipeline_recommended_plugin_pkey"),) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) - plugin_id = mapped_column(sa.Text, nullable=False) - provider_name = mapped_column(sa.Text, nullable=False) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) + plugin_id = mapped_column(LongText, nullable=False) + provider_name = mapped_column(LongText, nullable=False) position = mapped_column(sa.Integer, nullable=False, default=0) active = mapped_column(sa.Boolean, nullable=False, default=True) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/model.py b/api/models/model.py index f698b79d32..9949d4c20f 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -6,6 +6,7 @@ from datetime import datetime from decimal import Decimal from enum import StrEnum, auto from typing import TYPE_CHECKING, Any, Literal, Optional, cast +from uuid import uuid4 import sqlalchemy as sa from flask import request @@ -20,13 +21,14 @@ from core.file import helpers as file_helpers from core.tools.signature import sign_tool_file from core.workflow.enums import WorkflowExecutionStatus from libs.helper import generate_string # type: ignore[import-not-found] +from libs.uuid_utils import uuidv7 from .account import Account, Tenant from .base import Base from .engine import db from .enums import CreatorUserRole from .provider_ids import GenericProviderID -from .types import StringUUID +from .types import LongText, StringUUID if TYPE_CHECKING: from models.workflow import Workflow @@ -72,17 
+74,17 @@ class App(Base): __tablename__ = "apps" __table_args__ = (sa.PrimaryKeyConstraint("id", name="app_pkey"), sa.Index("app_tenant_id_idx", "tenant_id")) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID) name: Mapped[str] = mapped_column(String(255)) - description: Mapped[str] = mapped_column(sa.Text, server_default=sa.text("''::character varying")) + description: Mapped[str] = mapped_column(LongText, default=sa.text("''")) mode: Mapped[str] = mapped_column(String(255)) icon_type: Mapped[str | None] = mapped_column(String(255)) # image, emoji icon = mapped_column(String(255)) icon_background: Mapped[str | None] = mapped_column(String(255)) app_model_config_id = mapped_column(StringUUID, nullable=True) workflow_id = mapped_column(StringUUID, nullable=True) - status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'::character varying")) + status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'")) enable_site: Mapped[bool] = mapped_column(sa.Boolean) enable_api: Mapped[bool] = mapped_column(sa.Boolean) api_rpm: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0")) @@ -90,7 +92,7 @@ class App(Base): is_demo: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false")) is_public: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false")) is_universal: Mapped[bool] = mapped_column(sa.Boolean, server_default=sa.text("false")) - tracing = mapped_column(sa.Text, nullable=True) + tracing = mapped_column(LongText, nullable=True) max_active_requests: Mapped[int | None] created_by = mapped_column(StringUUID, nullable=True) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) @@ -308,7 +310,7 @@ class AppModelConfig(Base): __tablename__ = "app_model_configs" __table_args__ = (sa.PrimaryKeyConstraint("id", name="app_model_config_pkey"), sa.Index("app_app_id_idx", "app_id")) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) provider = mapped_column(String(255), nullable=True) model_id = mapped_column(String(255), nullable=True) @@ -319,25 +321,25 @@ class AppModelConfig(Base): updated_at = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() ) - opening_statement = mapped_column(sa.Text) - suggested_questions = mapped_column(sa.Text) - suggested_questions_after_answer = mapped_column(sa.Text) - speech_to_text = mapped_column(sa.Text) - text_to_speech = mapped_column(sa.Text) - more_like_this = mapped_column(sa.Text) - model = mapped_column(sa.Text) - user_input_form = mapped_column(sa.Text) + opening_statement = mapped_column(LongText) + suggested_questions = mapped_column(LongText) + suggested_questions_after_answer = mapped_column(LongText) + speech_to_text = mapped_column(LongText) + text_to_speech = mapped_column(LongText) + more_like_this = mapped_column(LongText) + model = mapped_column(LongText) + user_input_form = mapped_column(LongText) dataset_query_variable = mapped_column(String(255)) - pre_prompt = mapped_column(sa.Text) - agent_mode = mapped_column(sa.Text) - sensitive_word_avoidance = mapped_column(sa.Text) - retriever_resource = mapped_column(sa.Text) - prompt_type = 
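# Two portability fixes repeat through App and AppModelConfig above. The
# '...'::character varying casts are PostgreSQL-only spelling, so the bare
# string literal replaces them. And MySQL rejects a DEFAULT clause on
# TEXT/BLOB columns (parenthesised expression defaults arrived only in
# 8.0.13), so empty-string defaults move from server_default to a client-side
# default. Sketch of the pattern with demo names; the diff's
# default=sa.text("''") inlines the same literal as SQL, a plain Python
# default="" behaves equivalently here:
import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class _Base(DeclarativeBase):
    pass


class _AppDemo(_Base):
    __tablename__ = "_apps_demo"
    id: Mapped[int] = mapped_column(primary_key=True)
    # the CREATE TABLE carries no DEFAULT; every INSERT supplies '' itself
    description: Mapped[str] = mapped_column(sa.Text(), default="")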
mapped_column(String(255), nullable=False, server_default=sa.text("'simple'::character varying")) - chat_prompt_config = mapped_column(sa.Text) - completion_prompt_config = mapped_column(sa.Text) - dataset_configs = mapped_column(sa.Text) - external_data_tools = mapped_column(sa.Text) - file_upload = mapped_column(sa.Text) + pre_prompt = mapped_column(LongText) + agent_mode = mapped_column(LongText) + sensitive_word_avoidance = mapped_column(LongText) + retriever_resource = mapped_column(LongText) + prompt_type = mapped_column(String(255), nullable=False, server_default=sa.text("'simple'")) + chat_prompt_config = mapped_column(LongText) + completion_prompt_config = mapped_column(LongText) + dataset_configs = mapped_column(LongText) + external_data_tools = mapped_column(LongText) + file_upload = mapped_column(LongText) @property def app(self) -> App | None: @@ -537,17 +539,17 @@ class RecommendedApp(Base): sa.Index("recommended_app_is_listed_idx", "is_listed", "language"), ) - id = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) description = mapped_column(sa.JSON, nullable=False) copyright: Mapped[str] = mapped_column(String(255), nullable=False) privacy_policy: Mapped[str] = mapped_column(String(255), nullable=False) - custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") + custom_disclaimer: Mapped[str] = mapped_column(LongText, default="") category: Mapped[str] = mapped_column(String(255), nullable=False) position: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) is_listed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=True) install_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) - language = mapped_column(String(255), nullable=False, server_default=sa.text("'en-US'::character varying")) + language = mapped_column(String(255), nullable=False, server_default=sa.text("'en-US'")) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) updated_at = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() @@ -568,7 +570,7 @@ class InstalledApp(Base): sa.UniqueConstraint("tenant_id", "app_id", name="unique_tenant_app"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) app_id = mapped_column(StringUUID, nullable=False) app_owner_tenant_id = mapped_column(StringUUID, nullable=False) @@ -600,18 +602,18 @@ class OAuthProviderApp(Base): sa.Index("oauth_provider_app_client_id_idx", "client_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) app_icon = mapped_column(String(255), nullable=False) - app_label = mapped_column(sa.JSON, nullable=False, server_default="{}") + app_label = mapped_column(sa.JSON, nullable=False, default="{}") client_id = mapped_column(String(255), nullable=False) client_secret = mapped_column(String(255), nullable=False) - redirect_uris = mapped_column(sa.JSON, nullable=False, server_default="[]") + redirect_uris = mapped_column(sa.JSON, nullable=False, default="[]") scope = mapped_column( String(255), nullable=False, server_default=sa.text("'read:name read:email read:avatar 
read:interface_language read:timezone'"), ) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")) + created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) class Conversation(Base): @@ -621,18 +623,18 @@ class Conversation(Base): sa.Index("conversation_app_from_user_idx", "app_id", "from_source", "from_end_user_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) app_model_config_id = mapped_column(StringUUID, nullable=True) model_provider = mapped_column(String(255), nullable=True) - override_model_configs = mapped_column(sa.Text) + override_model_configs = mapped_column(LongText) model_id = mapped_column(String(255), nullable=True) mode: Mapped[str] = mapped_column(String(255)) name: Mapped[str] = mapped_column(String(255), nullable=False) - summary = mapped_column(sa.Text) + summary = mapped_column(LongText) _inputs: Mapped[dict[str, Any]] = mapped_column("inputs", sa.JSON) - introduction = mapped_column(sa.Text) - system_instruction = mapped_column(sa.Text) + introduction = mapped_column(LongText) + system_instruction = mapped_column(LongText) system_instruction_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) status: Mapped[str] = mapped_column(String(255), nullable=False) @@ -922,21 +924,21 @@ class Message(Base): Index("message_app_mode_idx", "app_mode"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) model_provider: Mapped[str | None] = mapped_column(String(255), nullable=True) model_id: Mapped[str | None] = mapped_column(String(255), nullable=True) - override_model_configs: Mapped[str | None] = mapped_column(sa.Text) + override_model_configs: Mapped[str | None] = mapped_column(LongText) conversation_id: Mapped[str] = mapped_column(StringUUID, sa.ForeignKey("conversations.id"), nullable=False) _inputs: Mapped[dict[str, Any]] = mapped_column("inputs", sa.JSON) - query: Mapped[str] = mapped_column(sa.Text, nullable=False) + query: Mapped[str] = mapped_column(LongText, nullable=False) message: Mapped[dict[str, Any]] = mapped_column(sa.JSON, nullable=False) message_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) message_unit_price: Mapped[Decimal] = mapped_column(sa.Numeric(10, 4), nullable=False) message_price_unit: Mapped[Decimal] = mapped_column( sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001") ) - answer: Mapped[str] = mapped_column(sa.Text, nullable=False) + answer: Mapped[str] = mapped_column(LongText, nullable=False) answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) answer_unit_price: Mapped[Decimal] = mapped_column(sa.Numeric(10, 4), nullable=False) answer_price_unit: Mapped[Decimal] = mapped_column( @@ -946,11 +948,9 @@ class Message(Base): provider_response_latency: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0")) total_price: Mapped[Decimal | None] = mapped_column(sa.Numeric(10, 7)) currency: Mapped[str] = mapped_column(String(255), nullable=False) - status: Mapped[str] = mapped_column( - String(255), nullable=False, 
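# The created_at fix on OAuthProviderApp above recurs throughout the patch:
# sa.text("CURRENT_TIMESTAMP(0)") hard-codes PostgreSQL's precision-qualified
# spelling, which MySQL only accepts when it matches the column's declared
# fractional-seconds precision. func.current_timestamp() lets each dialect
# render its own portable form:
import sqlalchemy as sa
from sqlalchemy.dialects import mysql, postgresql

expr = sa.func.current_timestamp()
print(expr.compile(dialect=postgresql.dialect()))  # CURRENT_TIMESTAMP
print(expr.compile(dialect=mysql.dialect()))       # CURRENT_TIMESTAMP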
server_default=sa.text("'normal'::character varying") - ) - error: Mapped[str | None] = mapped_column(sa.Text) - message_metadata: Mapped[str | None] = mapped_column(sa.Text) + status: Mapped[str] = mapped_column(String(255), nullable=False, server_default=sa.text("'normal'")) + error: Mapped[str | None] = mapped_column(LongText) + message_metadata: Mapped[str | None] = mapped_column(LongText) invoke_from: Mapped[str | None] = mapped_column(String(255), nullable=True) from_source: Mapped[str] = mapped_column(String(255), nullable=False) from_end_user_id: Mapped[str | None] = mapped_column(StringUUID) @@ -1296,12 +1296,12 @@ class MessageFeedback(Base): sa.Index("message_feedback_conversation_idx", "conversation_id", "from_source", "rating"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) rating: Mapped[str] = mapped_column(String(255), nullable=False) - content: Mapped[str | None] = mapped_column(sa.Text) + content: Mapped[str | None] = mapped_column(LongText) from_source: Mapped[str] = mapped_column(String(255), nullable=False) from_end_user_id: Mapped[str | None] = mapped_column(StringUUID) from_account_id: Mapped[str | None] = mapped_column(StringUUID) @@ -1360,11 +1360,11 @@ class MessageFile(Base): self.created_by_role = created_by_role.value self.created_by = created_by - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(String(255), nullable=False) transfer_method: Mapped[str] = mapped_column(String(255), nullable=False) - url: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + url: Mapped[str | None] = mapped_column(LongText, nullable=True) belongs_to: Mapped[str | None] = mapped_column(String(255), nullable=True) upload_file_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) created_by_role: Mapped[str] = mapped_column(String(255), nullable=False) @@ -1381,12 +1381,12 @@ class MessageAnnotation(Base): sa.Index("message_annotation_message_idx", "message_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id: Mapped[str] = mapped_column(StringUUID) conversation_id: Mapped[str | None] = mapped_column(StringUUID, sa.ForeignKey("conversations.id")) message_id: Mapped[str | None] = mapped_column(StringUUID) - question = mapped_column(sa.Text, nullable=True) - content = mapped_column(sa.Text, nullable=False) + question = mapped_column(LongText, nullable=True) + content = mapped_column(LongText, nullable=False) hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) account_id = mapped_column(StringUUID, nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) @@ -1415,17 +1415,17 @@ class AppAnnotationHitHistory(Base): sa.Index("app_annotation_hit_histories_message_idx", "message_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, 
default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) annotation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - source = mapped_column(sa.Text, nullable=False) - question = mapped_column(sa.Text, nullable=False) + source = mapped_column(LongText, nullable=False) + question = mapped_column(LongText, nullable=False) account_id = mapped_column(StringUUID, nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) score = mapped_column(Float, nullable=False, server_default=sa.text("0")) message_id = mapped_column(StringUUID, nullable=False) - annotation_question = mapped_column(sa.Text, nullable=False) - annotation_content = mapped_column(sa.Text, nullable=False) + annotation_question = mapped_column(LongText, nullable=False) + annotation_content = mapped_column(LongText, nullable=False) @property def account(self): @@ -1450,7 +1450,7 @@ class AppAnnotationSetting(Base): sa.Index("app_annotation_settings_app_idx", "app_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) score_threshold = mapped_column(Float, nullable=False, server_default=sa.text("0")) collection_binding_id = mapped_column(StringUUID, nullable=False) @@ -1480,7 +1480,7 @@ class OperationLog(Base): sa.Index("operation_log_account_action_idx", "tenant_id", "account_id", "action"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) account_id = mapped_column(StringUUID, nullable=False) action: Mapped[str] = mapped_column(String(255), nullable=False) @@ -1508,7 +1508,7 @@ class EndUser(Base, UserMixin): sa.Index("end_user_tenant_session_id_idx", "tenant_id", "session_id", "type"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id = mapped_column(StringUUID, nullable=True) type: Mapped[str] = mapped_column(String(255), nullable=False) @@ -1526,7 +1526,7 @@ class EndUser(Base, UserMixin): def is_anonymous(self, value: bool) -> None: self._is_anonymous = value - session_id: Mapped[str] = mapped_column() + session_id: Mapped[str] = mapped_column(String(255), nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) updated_at = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() @@ -1540,14 +1540,14 @@ class AppMCPServer(Base): sa.UniqueConstraint("tenant_id", "app_id", name="unique_app_mcp_server_tenant_app_id"), sa.UniqueConstraint("server_code", name="unique_app_mcp_server_server_code"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) app_id = mapped_column(StringUUID, nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) description: Mapped[str] = mapped_column(String(255), nullable=False) server_code: Mapped[str] = mapped_column(String(255), nullable=False) - status = mapped_column(String(255), nullable=False, server_default=sa.text("'normal'::character varying"))
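# session_id above gains an explicit length for the same reason currency and
# created_by_role do elsewhere in the patch: PostgreSQL renders a bare String
# as unbounded VARCHAR, but MySQL's DDL compiler refuses VARCHAR without a
# length:
import sqlalchemy as sa
from sqlalchemy.dialects import mysql, postgresql
from sqlalchemy.exc import CompileError

bare = sa.String()
print(bare.compile(dialect=postgresql.dialect()))  # VARCHAR
try:
    bare.compile(dialect=mysql.dialect())
except CompileError as err:
    print(err)  # VARCHAR requires a length on dialect mysql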
- parameters = mapped_column(sa.Text, nullable=False) + status = mapped_column(String(255), nullable=False, server_default=sa.text("'normal'")) + parameters = mapped_column(LongText, nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) updated_at = mapped_column( @@ -1576,13 +1576,13 @@ class Site(Base): sa.Index("site_code_idx", "code", "status"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) title: Mapped[str] = mapped_column(String(255), nullable=False) icon_type = mapped_column(String(255), nullable=True) icon = mapped_column(String(255)) icon_background = mapped_column(String(255)) - description = mapped_column(sa.Text) + description = mapped_column(LongText) default_language: Mapped[str] = mapped_column(String(255), nullable=False) chat_color_theme = mapped_column(String(255)) chat_color_theme_inverted: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) @@ -1590,11 +1590,11 @@ class Site(Base): privacy_policy = mapped_column(String(255)) show_workflow_steps: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) use_icon_as_answer_icon: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - _custom_disclaimer: Mapped[str] = mapped_column("custom_disclaimer", sa.TEXT, default="") + _custom_disclaimer: Mapped[str] = mapped_column("custom_disclaimer", LongText, default="") customize_domain = mapped_column(String(255)) customize_token_strategy: Mapped[str] = mapped_column(String(255), nullable=False) prompt_public: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - status = mapped_column(String(255), nullable=False, server_default=sa.text("'normal'::character varying")) + status = mapped_column(String(255), nullable=False, server_default=sa.text("'normal'")) created_by = mapped_column(StringUUID, nullable=True) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) @@ -1636,7 +1636,7 @@ class ApiToken(Base): sa.Index("api_token_tenant_idx", "tenant_id", "type"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=True) tenant_id = mapped_column(StringUUID, nullable=True) type = mapped_column(String(16), nullable=False) @@ -1663,7 +1663,7 @@ class UploadFile(Base): # NOTE: The `id` field is generated within the application to minimize extra roundtrips # (especially when generating `source_url`). # The `server_default` serves as a fallback mechanism. - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) storage_type: Mapped[str] = mapped_column(String(255), nullable=False) key: Mapped[str] = mapped_column(String(255), nullable=False) @@ -1674,9 +1674,7 @@ class UploadFile(Base): # The `created_by_role` field indicates whether the file was created by an `Account` or an `EndUser`. # Its value is derived from the `CreatorUserRole` enumeration. 
- created_by_role: Mapped[str] = mapped_column( - String(255), nullable=False, server_default=sa.text("'account'::character varying") - ) + created_by_role: Mapped[str] = mapped_column(String(255), nullable=False, server_default=sa.text("'account'")) # The `created_by` field stores the ID of the entity that created this upload file. # @@ -1700,7 +1698,7 @@ class UploadFile(Base): used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True) used_at: Mapped[datetime | None] = mapped_column(sa.DateTime, nullable=True) hash: Mapped[str | None] = mapped_column(String(255), nullable=True) - source_url: Mapped[str] = mapped_column(sa.TEXT, default="") + source_url: Mapped[str] = mapped_column(LongText, default="") def __init__( self, @@ -1746,12 +1744,12 @@ class ApiRequest(Base): sa.Index("api_request_token_idx", "tenant_id", "api_token_id"), ) - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=False) api_token_id = mapped_column(StringUUID, nullable=False) path: Mapped[str] = mapped_column(String(255), nullable=False) - request = mapped_column(sa.Text, nullable=True) - response = mapped_column(sa.Text, nullable=True) + request = mapped_column(LongText, nullable=True) + response = mapped_column(LongText, nullable=True) ip: Mapped[str] = mapped_column(String(255), nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) @@ -1763,11 +1761,11 @@ class MessageChain(Base): sa.Index("message_chain_message_id_idx", "message_id"), ) - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) message_id = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(String(255), nullable=False) - input = mapped_column(sa.Text, nullable=True) - output = mapped_column(sa.Text, nullable=True) + input = mapped_column(LongText, nullable=True) + output = mapped_column(LongText, nullable=True) created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) @@ -1779,32 +1777,32 @@ class MessageAgentThought(Base): sa.Index("message_agent_thought_message_chain_id_idx", "message_chain_id"), ) - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) message_id = mapped_column(StringUUID, nullable=False) message_chain_id = mapped_column(StringUUID, nullable=True) position: Mapped[int] = mapped_column(sa.Integer, nullable=False) - thought = mapped_column(sa.Text, nullable=True) - tool = mapped_column(sa.Text, nullable=True) - tool_labels_str = mapped_column(sa.Text, nullable=False, server_default=sa.text("'{}'::text")) - tool_meta_str = mapped_column(sa.Text, nullable=False, server_default=sa.text("'{}'::text")) - tool_input = mapped_column(sa.Text, nullable=True) - observation = mapped_column(sa.Text, nullable=True) + thought = mapped_column(LongText, nullable=True) + tool = mapped_column(LongText, nullable=True) + tool_labels_str = mapped_column(LongText, nullable=False, default=sa.text("'{}'")) + tool_meta_str = mapped_column(LongText, nullable=False, default=sa.text("'{}'")) + tool_input = mapped_column(LongText, nullable=True) + observation = mapped_column(LongText, nullable=True) # plugin_id = mapped_column(StringUUID, nullable=True) ## for future 
design - tool_process_data = mapped_column(sa.Text, nullable=True) - message = mapped_column(sa.Text, nullable=True) + tool_process_data = mapped_column(LongText, nullable=True) + message = mapped_column(LongText, nullable=True) message_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) message_unit_price = mapped_column(sa.Numeric, nullable=True) message_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) - message_files = mapped_column(sa.Text, nullable=True) - answer = mapped_column(sa.Text, nullable=True) + message_files = mapped_column(LongText, nullable=True) + answer = mapped_column(LongText, nullable=True) answer_token: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) answer_unit_price = mapped_column(sa.Numeric, nullable=True) answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) total_price = mapped_column(sa.Numeric, nullable=True) - currency = mapped_column(String, nullable=True) + currency = mapped_column(String(255), nullable=True) latency: Mapped[float | None] = mapped_column(sa.Float, nullable=True) - created_by_role = mapped_column(String, nullable=False) + created_by_role = mapped_column(String(255), nullable=False) created_by = mapped_column(StringUUID, nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) @@ -1892,22 +1890,22 @@ class DatasetRetrieverResource(Base): sa.Index("dataset_retriever_resource_message_id_idx", "message_id"), ) - id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) message_id = mapped_column(StringUUID, nullable=False) position: Mapped[int] = mapped_column(sa.Integer, nullable=False) dataset_id = mapped_column(StringUUID, nullable=False) - dataset_name = mapped_column(sa.Text, nullable=False) + dataset_name = mapped_column(LongText, nullable=False) document_id = mapped_column(StringUUID, nullable=True) - document_name = mapped_column(sa.Text, nullable=False) - data_source_type = mapped_column(sa.Text, nullable=True) + document_name = mapped_column(LongText, nullable=False) + data_source_type = mapped_column(LongText, nullable=True) segment_id = mapped_column(StringUUID, nullable=True) score: Mapped[float | None] = mapped_column(sa.Float, nullable=True) - content = mapped_column(sa.Text, nullable=False) + content = mapped_column(LongText, nullable=False) hit_count: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) word_count: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) segment_position: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - index_node_hash = mapped_column(sa.Text, nullable=True) - retriever_from = mapped_column(sa.Text, nullable=False) + index_node_hash = mapped_column(LongText, nullable=True) + retriever_from = mapped_column(LongText, nullable=False) created_by = mapped_column(StringUUID, nullable=False) created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) @@ -1922,7 +1920,7 @@ class Tag(Base): TAG_TYPE_LIST = ["knowledge", "app"] - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=True) type = mapped_column(String(16), nullable=False) name: Mapped[str] = 
mapped_column(String(255), nullable=False) @@ -1938,7 +1936,7 @@ class TagBinding(Base): sa.Index("tag_bind_tag_id_idx", "tag_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id = mapped_column(StringUUID, nullable=True) tag_id = mapped_column(StringUUID, nullable=True) target_id = mapped_column(StringUUID, nullable=True) @@ -1953,7 +1951,7 @@ class TraceAppConfig(Base): sa.Index("trace_app_config_app_id_idx", "app_id"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=False) tracing_provider = mapped_column(String(255), nullable=True) tracing_config = mapped_column(sa.JSON, nullable=True) diff --git a/api/models/oauth.py b/api/models/oauth.py index e705b3d189..f2bb4eff2b 100644 --- a/api/models/oauth.py +++ b/api/models/oauth.py @@ -2,11 +2,12 @@ from datetime import datetime import sqlalchemy as sa from sqlalchemy import func -from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, mapped_column +from libs.uuid_utils import uuidv7 + from .base import Base -from .types import StringUUID +from .types import AdjustedJSON, LongText, StringUUID class DatasourceOauthParamConfig(Base): # type: ignore[name-defined] @@ -16,10 +17,10 @@ class DatasourceOauthParamConfig(Base): # type: ignore[name-defined] sa.UniqueConstraint("plugin_id", "provider", name="datasource_oauth_config_datasource_id_provider_idx"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False) provider: Mapped[str] = mapped_column(sa.String(255), nullable=False) - system_credentials: Mapped[dict] = mapped_column(JSONB, nullable=False) + system_credentials: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False) class DatasourceProvider(Base): @@ -29,14 +30,14 @@ class DatasourceProvider(Base): sa.UniqueConstraint("tenant_id", "plugin_id", "provider", "name", name="datasource_provider_unique_name"), sa.Index("datasource_provider_auth_type_provider_idx", "tenant_id", "plugin_id", "provider"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id = mapped_column(StringUUID, nullable=False) name: Mapped[str] = mapped_column(sa.String(255), nullable=False) - provider: Mapped[str] = mapped_column(sa.String(255), nullable=False) + provider: Mapped[str] = mapped_column(sa.String(128), nullable=False) plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False) auth_type: Mapped[str] = mapped_column(sa.String(255), nullable=False) - encrypted_credentials: Mapped[dict] = mapped_column(JSONB, nullable=False) - avatar_url: Mapped[str] = mapped_column(sa.Text, nullable=True, default="default") + encrypted_credentials: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False) + avatar_url: Mapped[str] = mapped_column(LongText, nullable=True, default="default") is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) expires_at: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default="-1") @@ -53,11 +54,11 @@ class DatasourceOauthTenantParamConfig(Base): sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="datasource_oauth_tenant_config_unique"), ) - id 
= mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id = mapped_column(StringUUID, nullable=False) provider: Mapped[str] = mapped_column(sa.String(255), nullable=False) plugin_id: Mapped[str] = mapped_column(sa.String(255), nullable=False) - client_params: Mapped[dict] = mapped_column(JSONB, nullable=False, default={}) + client_params: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False, default={}) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False) created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/provider.py b/api/models/provider.py index 4de17a7fd5..07e7384ad5 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -1,14 +1,17 @@ from datetime import datetime from enum import StrEnum, auto from functools import cached_property +from uuid import uuid4 import sqlalchemy as sa from sqlalchemy import DateTime, String, func, text from sqlalchemy.orm import Mapped, mapped_column +from libs.uuid_utils import uuidv7 + from .base import Base, TypeBase from .engine import db -from .types import StringUUID +from .types import LongText, StringUUID class ProviderType(StrEnum): @@ -55,19 +58,17 @@ class Provider(TypeBase): ), ) - id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuidv7()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuidv7()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) provider_type: Mapped[str] = mapped_column( - String(40), nullable=False, server_default=text("'custom'::character varying"), default="custom" + String(40), nullable=False, server_default=text("'custom'"), default="custom" ) is_valid: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("false"), default=False) last_used: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, init=False) credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) - quota_type: Mapped[str | None] = mapped_column( - String(40), nullable=True, server_default=text("''::character varying"), default="" - ) + quota_type: Mapped[str | None] = mapped_column(String(40), nullable=True, server_default=text("''"), default="") quota_limit: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=None) quota_used: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, default=0) @@ -131,7 +132,7 @@ class ProviderModel(Base): ), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) @@ -170,7 +171,7 @@ class TenantDefaultModel(Base): sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) 
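# The JSONB -> AdjustedJSON swaps in oauth.py above (and in source.py below)
# presumably wrap the same variant idea as LongText: generic JSON everywhere,
# upgraded to JSONB on PostgreSQL so existing columns keep their binary-JSON
# storage and operators. Assumed sketch; the real definition is in
# api/models/types.py:
from sqlalchemy import JSON
from sqlalchemy.dialects.postgresql import JSONB

AdjustedJSON = JSON().with_variant(JSONB(), "postgresql")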
model_name: Mapped[str] = mapped_column(String(255), nullable=False) @@ -188,7 +189,7 @@ class TenantPreferredModelProvider(Base): sa.Index("tenant_preferred_model_provider_tenant_provider_idx", "tenant_id", "provider_name"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) preferred_provider_type: Mapped[str] = mapped_column(String(40), nullable=False) @@ -205,7 +206,7 @@ class ProviderOrder(Base): sa.Index("provider_order_tenant_provider_idx", "tenant_id", "provider_name"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -215,9 +216,7 @@ class ProviderOrder(Base): quantity: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=text("1")) currency: Mapped[str | None] = mapped_column(String(40)) total_amount: Mapped[int | None] = mapped_column(sa.Integer) - payment_status: Mapped[str] = mapped_column( - String(40), nullable=False, server_default=text("'wait_pay'::character varying") - ) + payment_status: Mapped[str] = mapped_column(String(40), nullable=False, server_default=text("'wait_pay'")) paid_at: Mapped[datetime | None] = mapped_column(DateTime) pay_failed_at: Mapped[datetime | None] = mapped_column(DateTime) refunded_at: Mapped[datetime | None] = mapped_column(DateTime) @@ -238,7 +237,7 @@ class ProviderModelSetting(Base): sa.Index("provider_model_setting_tenant_provider_model_idx", "tenant_id", "provider_name", "model_type"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) @@ -262,13 +261,13 @@ class LoadBalancingModelConfig(Base): sa.Index("load_balancing_model_config_tenant_provider_model_idx", "tenant_id", "provider_name", "model_type"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) model_type: Mapped[str] = mapped_column(String(40), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) - encrypted_config: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + encrypted_config: Mapped[str | None] = mapped_column(LongText, nullable=True) credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) credential_source_type: Mapped[str | None] = mapped_column(String(40), nullable=True) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true")) @@ -289,11 +288,11 @@ class ProviderCredential(Base): sa.Index("provider_credential_tenant_provider_idx", "tenant_id", 
"provider_name"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuidv7()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) credential_name: Mapped[str] = mapped_column(String(255), nullable=False) - encrypted_config: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() @@ -317,13 +316,13 @@ class ProviderModelCredential(Base): ), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuidv7()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) model_type: Mapped[str] = mapped_column(String(40), nullable=False) credential_name: Mapped[str] = mapped_column(String(255), nullable=False) - encrypted_config: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() diff --git a/api/models/source.py b/api/models/source.py index 0ed7c4c70e..ed5d30b48a 100644 --- a/api/models/source.py +++ b/api/models/source.py @@ -1,14 +1,14 @@ import json from datetime import datetime +from uuid import uuid4 import sqlalchemy as sa from sqlalchemy import DateTime, String, func -from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, mapped_column from models.base import TypeBase -from .types import StringUUID +from .types import AdjustedJSON, LongText, StringUUID, adjusted_json_index class DataSourceOauthBinding(TypeBase): @@ -16,14 +16,14 @@ class DataSourceOauthBinding(TypeBase): __table_args__ = ( sa.PrimaryKeyConstraint("id", name="source_binding_pkey"), sa.Index("source_binding_tenant_id_idx", "tenant_id"), - sa.Index("source_info_idx", "source_info", postgresql_using="gin"), + adjusted_json_index("source_info_idx", "source_info"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) access_token: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - source_info: Mapped[dict] = mapped_column(JSONB, nullable=False) + source_info: Mapped[dict] = mapped_column(AdjustedJSON, nullable=False) created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) @@ -45,11 +45,11 @@ class DataSourceApiKeyAuthBinding(TypeBase): sa.Index("data_source_api_key_auth_binding_provider_idx", "provider"), ) - id: Mapped[str] = mapped_column(StringUUID, 
server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) category: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) - credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) # JSON + credentials: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) # JSON created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) diff --git a/api/models/task.py b/api/models/task.py index 513f167cce..1e00e46643 100644 --- a/api/models/task.py +++ b/api/models/task.py @@ -8,6 +8,8 @@ from sqlalchemy.orm import Mapped, mapped_column from libs.datetime_utils import naive_utc_now from models.base import TypeBase +from .types import BinaryData, LongText + class CeleryTask(TypeBase): """Task result/status.""" @@ -19,17 +21,17 @@ class CeleryTask(TypeBase): ) task_id: Mapped[str] = mapped_column(String(155), unique=True) status: Mapped[str] = mapped_column(String(50), default=states.PENDING) - result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None) + result: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None) date_done: Mapped[datetime | None] = mapped_column( DateTime, default=naive_utc_now, onupdate=naive_utc_now, nullable=True, ) - traceback: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + traceback: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) name: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) - args: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None) - kwargs: Mapped[bytes | None] = mapped_column(sa.LargeBinary, nullable=True, default=None) + args: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None) + kwargs: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None) worker: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) retries: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) queue: Mapped[str | None] = mapped_column(String(155), nullable=True, default=None) @@ -44,5 +46,5 @@ class CeleryTaskSet(TypeBase): sa.Integer, sa.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True, init=False ) taskset_id: Mapped[str] = mapped_column(String(155), unique=True) - result: Mapped[bytes | None] = mapped_column(sa.PickleType, nullable=True, default=None) + result: Mapped[bytes | None] = mapped_column(BinaryData, nullable=True, default=None) date_done: Mapped[datetime | None] = mapped_column(DateTime, default=naive_utc_now, nullable=True) diff --git a/api/models/tools.py b/api/models/tools.py index 12acc149b1..360c80c974 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -2,6 +2,7 @@ import json from datetime import datetime from decimal import Decimal from typing import TYPE_CHECKING, Any, cast +from uuid import uuid4 import sqlalchemy as sa from deprecated import deprecated @@ -15,7 +16,7 @@ from models.base import TypeBase from .engine import db from .model import Account, App, Tenant -from .types import StringUUID +from .types import LongText, StringUUID if TYPE_CHECKING: from core.entities.mcp_provider import MCPProviderEntity @@ -32,11 +33,11 @@ class 
ToolOAuthSystemClient(TypeBase): sa.UniqueConstraint("plugin_id", "provider", name="tool_oauth_system_client_plugin_id_provider_idx"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) # oauth params of the tool provider - encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False) # tenant level tool oauth client params (client_id, client_secret, etc.) @@ -47,14 +48,14 @@ class ToolOAuthTenantClient(TypeBase): sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_tool_oauth_tenant_client"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) + plugin_id: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), init=False) # oauth params of the tool provider - encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False, init=False) + encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False, init=False) @property def oauth_params(self) -> dict[str, Any]: @@ -73,11 +74,11 @@ class BuiltinToolProvider(TypeBase): ) # id of the tool provider - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) name: Mapped[str] = mapped_column( String(256), nullable=False, - server_default=sa.text("'API KEY 1'::character varying"), + server_default=sa.text("'API KEY 1'"), ) # id of the tenant tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) @@ -86,21 +87,21 @@ class BuiltinToolProvider(TypeBase): # name of the tool provider provider: Mapped[str] = mapped_column(String(256), nullable=False) # credential of the tool provider - encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + encrypted_credentials: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) updated_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, - server_default=sa.text("CURRENT_TIMESTAMP(0)"), + server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False, ) is_default: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False) # credential type, e.g., "api-key", "oauth2" credential_type: Mapped[str] = mapped_column( - String(32), nullable=False, server_default=sa.text("'api-key'::character varying"), default="api-key" + String(32), nullable=False, server_default=sa.text("'api-key'"), default="api-key" ) expires_at: Mapped[int] = 
mapped_column(sa.BigInteger, nullable=False, server_default=sa.text("-1"), default=-1) @@ -122,32 +123,32 @@ class ApiToolProvider(TypeBase): sa.UniqueConstraint("name", "tenant_id", name="unique_api_tool_provider"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # name of the api provider name: Mapped[str] = mapped_column( String(255), nullable=False, - server_default=sa.text("'API KEY 1'::character varying"), + server_default=sa.text("'API KEY 1'"), ) # icon icon: Mapped[str] = mapped_column(String(255), nullable=False) # original schema - schema: Mapped[str] = mapped_column(sa.Text, nullable=False) + schema: Mapped[str] = mapped_column(LongText, nullable=False) schema_type_str: Mapped[str] = mapped_column(String(40), nullable=False) # who created this tool user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # description of the provider - description: Mapped[str] = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(LongText, nullable=False) # json format tools - tools_str: Mapped[str] = mapped_column(sa.Text, nullable=False) + tools_str: Mapped[str] = mapped_column(LongText, nullable=False) # json format credentials - credentials_str: Mapped[str] = mapped_column(sa.Text, nullable=False) + credentials_str: Mapped[str] = mapped_column(LongText, nullable=False) # privacy policy privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None) # custom_disclaimer - custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") + custom_disclaimer: Mapped[str] = mapped_column(LongText, default="") created_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False @@ -198,7 +199,7 @@ class ToolLabelBinding(TypeBase): sa.UniqueConstraint("tool_id", "label_name", name="unique_tool_label_bind"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # tool id tool_id: Mapped[str] = mapped_column(String(64), nullable=False) # tool type @@ -219,7 +220,7 @@ class WorkflowToolProvider(TypeBase): sa.UniqueConstraint("tenant_id", "app_id", name="unique_workflow_tool_provider_app_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # name of the workflow provider name: Mapped[str] = mapped_column(String(255), nullable=False) # label of the workflow provider @@ -235,19 +236,19 @@ class WorkflowToolProvider(TypeBase): # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # description of the provider - description: Mapped[str] = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(LongText, nullable=False) # parameter configuration - parameter_configuration: Mapped[str] = mapped_column(sa.Text, nullable=False, server_default="[]", default="[]") + parameter_configuration: Mapped[str] = mapped_column(LongText, nullable=False, default="[]") # privacy policy privacy_policy: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default=None) created_at: Mapped[datetime] = mapped_column( - sa.DateTime, 
nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) updated_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, - server_default=sa.text("CURRENT_TIMESTAMP(0)"), + server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False, ) @@ -287,13 +288,13 @@ class MCPToolProvider(TypeBase): sa.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # name of the mcp provider name: Mapped[str] = mapped_column(String(40), nullable=False) # server identifier of the mcp provider server_identifier: Mapped[str] = mapped_column(String(64), nullable=False) # encrypted url of the mcp provider - server_url: Mapped[str] = mapped_column(sa.Text, nullable=False) + server_url: Mapped[str] = mapped_column(LongText, nullable=False) # hash of server_url for uniqueness check server_url_hash: Mapped[str] = mapped_column(String(64), nullable=False) # icon of the mcp provider @@ -303,18 +304,18 @@ class MCPToolProvider(TypeBase): # who created this tool user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # encrypted credentials - encrypted_credentials: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + encrypted_credentials: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) # authed authed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=False) # tools - tools: Mapped[str] = mapped_column(sa.Text, nullable=False, default="[]") + tools: Mapped[str] = mapped_column(LongText, nullable=False, default="[]") created_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) updated_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, - server_default=sa.text("CURRENT_TIMESTAMP(0)"), + server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False, ) @@ -323,7 +324,7 @@ class MCPToolProvider(TypeBase): sa.Float, nullable=False, server_default=sa.text("300"), default=300.0 ) # encrypted headers for MCP server requests - encrypted_headers: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + encrypted_headers: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) def load_user(self) -> Account | None: return db.session.query(Account).where(Account.id == self.user_id).first() @@ -368,7 +369,7 @@ class ToolModelInvoke(TypeBase): __tablename__ = "tool_model_invokes" __table_args__ = (sa.PrimaryKeyConstraint("id", name="tool_model_invoke_pkey"),) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # who invoke this tool user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id @@ -380,11 +381,11 @@ class ToolModelInvoke(TypeBase): # tool name tool_name: Mapped[str] = mapped_column(String(128), nullable=False) # invoke parameters - model_parameters: Mapped[str] = mapped_column(sa.Text, nullable=False) + model_parameters: Mapped[str] = mapped_column(LongText, nullable=False) # prompt messages - 
prompt_messages: Mapped[str] = mapped_column(sa.Text, nullable=False) + prompt_messages: Mapped[str] = mapped_column(LongText, nullable=False) # invoke response - model_response: Mapped[str] = mapped_column(sa.Text, nullable=False) + model_response: Mapped[str] = mapped_column(LongText, nullable=False) prompt_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) @@ -421,7 +422,7 @@ class ToolConversationVariables(TypeBase): sa.Index("conversation_id_idx", "conversation_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # conversation user id user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id @@ -429,7 +430,7 @@ class ToolConversationVariables(TypeBase): # conversation id conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # variables pool - variables_str: Mapped[str] = mapped_column(sa.Text, nullable=False) + variables_str: Mapped[str] = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False @@ -458,7 +459,7 @@ class ToolFile(TypeBase): sa.Index("tool_file_conversation_id_idx", "conversation_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # conversation user id user_id: Mapped[str] = mapped_column(StringUUID) # tenant id @@ -472,9 +473,9 @@ class ToolFile(TypeBase): # original url original_url: Mapped[str | None] = mapped_column(String(2048), nullable=True, default=None) # name - name: Mapped[str] = mapped_column(default="") + name: Mapped[str] = mapped_column(String(255), default="") # size - size: Mapped[int] = mapped_column(default=-1) + size: Mapped[int] = mapped_column(sa.Integer, default=-1) @deprecated @@ -489,18 +490,18 @@ class DeprecatedPublishedAppTool(TypeBase): sa.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # id of the app app_id: Mapped[str] = mapped_column(StringUUID, ForeignKey("apps.id"), nullable=False) user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # who published this tool - description: Mapped[str] = mapped_column(sa.Text, nullable=False) + description: Mapped[str] = mapped_column(LongText, nullable=False) # llm_description of the tool, for LLM - llm_description: Mapped[str] = mapped_column(sa.Text, nullable=False) + llm_description: Mapped[str] = mapped_column(LongText, nullable=False) # query description, query will be seem as a parameter of the tool, # to describe this parameter to llm, we need this field - query_description: Mapped[str] = mapped_column(sa.Text, nullable=False) + query_description: Mapped[str] = mapped_column(LongText, nullable=False) # query name, the name of the query parameter query_name: Mapped[str] = mapped_column(String(40), nullable=False) # name of the tool provider @@ -508,12 +509,12 @@ class DeprecatedPublishedAppTool(TypeBase): # author author: Mapped[str] = mapped_column(String(40), nullable=False) created_at: 
Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) updated_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, - server_default=sa.text("CURRENT_TIMESTAMP(0)"), + server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False, ) diff --git a/api/models/trigger.py b/api/models/trigger.py index b537f0cf3f..6fc8683459 100644 --- a/api/models/trigger.py +++ b/api/models/trigger.py @@ -4,6 +4,7 @@ from collections.abc import Mapping from datetime import datetime from functools import cached_property from typing import Any, cast +from uuid import uuid4 import sqlalchemy as sa from sqlalchemy import DateTime, Index, Integer, String, UniqueConstraint, func @@ -14,11 +15,12 @@ from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEnt from core.trigger.entities.entities import Subscription from core.trigger.utils.endpoint import generate_plugin_trigger_endpoint_url, generate_webhook_trigger_endpoint from libs.datetime_utils import naive_utc_now +from libs.uuid_utils import uuidv7 from models.base import Base, TypeBase from models.engine import db from models.enums import AppTriggerStatus, AppTriggerType, CreatorUserRole, WorkflowTriggerStatus from models.model import Account -from models.types import EnumText, StringUUID +from models.types import EnumText, LongText, StringUUID class TriggerSubscription(Base): @@ -38,7 +40,7 @@ class TriggerSubscription(Base): UniqueConstraint("tenant_id", "provider_id", "name", name="unique_trigger_provider"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) name: Mapped[str] = mapped_column(String(255), nullable=False, comment="Subscription instance name") tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -105,11 +107,11 @@ class TriggerOAuthSystemClient(Base): sa.UniqueConstraint("plugin_id", "provider", name="trigger_oauth_system_client_plugin_id_provider_idx"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) - plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) + plugin_id: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) # oauth params of the trigger provider - encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column( DateTime, @@ -127,14 +129,14 @@ class TriggerOAuthTenantClient(Base): sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_trigger_oauth_tenant_client"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - plugin_id: Mapped[str] = mapped_column(String(512), nullable=False) + plugin_id: Mapped[str] = mapped_column(String(255), nullable=False) provider: 
Mapped[str] = mapped_column(String(255), nullable=False) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true")) # oauth params of the trigger provider - encrypted_oauth_params: Mapped[str] = mapped_column(sa.Text, nullable=False) + encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column( DateTime, @@ -190,22 +192,22 @@ class WorkflowTriggerLog(Base): sa.Index("workflow_trigger_log_workflow_id_idx", "workflow_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) workflow_run_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) root_node_id: Mapped[str | None] = mapped_column(String(255), nullable=True) - trigger_metadata: Mapped[str] = mapped_column(sa.Text, nullable=False) + trigger_metadata: Mapped[str] = mapped_column(LongText, nullable=False) trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False) - trigger_data: Mapped[str] = mapped_column(sa.Text, nullable=False) # Full TriggerData as JSON - inputs: Mapped[str] = mapped_column(sa.Text, nullable=False) # Just inputs for easy viewing - outputs: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + trigger_data: Mapped[str] = mapped_column(LongText, nullable=False) # Full TriggerData as JSON + inputs: Mapped[str] = mapped_column(LongText, nullable=False) # Just inputs for easy viewing + outputs: Mapped[str | None] = mapped_column(LongText, nullable=True) status: Mapped[str] = mapped_column( EnumText(WorkflowTriggerStatus, length=50), nullable=False, default=WorkflowTriggerStatus.PENDING ) - error: Mapped[str | None] = mapped_column(sa.Text, nullable=True) + error: Mapped[str | None] = mapped_column(LongText, nullable=True) queue_name: Mapped[str] = mapped_column(String(100), nullable=False) celery_task_id: Mapped[str | None] = mapped_column(String(255), nullable=True) @@ -285,7 +287,7 @@ class WorkflowWebhookTrigger(Base): sa.UniqueConstraint("webhook_id", name="uniq_webhook_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) node_id: Mapped[str] = mapped_column(String(64), nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -339,7 +341,7 @@ class WorkflowPluginTrigger(Base): sa.UniqueConstraint("app_id", "node_id", name="uniq_app_node_subscription"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) node_id: Mapped[str] = mapped_column(String(64), nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -380,7 +382,7 @@ class AppTrigger(Base): sa.Index("app_trigger_tenant_app_idx", "tenant_id", "app_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()")) + id: Mapped[str] = mapped_column(StringUUID, 
default=lambda: str(uuidv7())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) node_id: Mapped[str | None] = mapped_column(String(64), nullable=False) @@ -425,7 +427,7 @@ class WorkflowSchedulePlan(TypeBase): sa.Index("workflow_schedule_plan_next_idx", "next_run_at"), ) - id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuidv7()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuidv7()), init=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) node_id: Mapped[str] = mapped_column(String(64), nullable=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) diff --git a/api/models/types.py b/api/models/types.py index cc69ae4f57..75dc495fed 100644 --- a/api/models/types.py +++ b/api/models/types.py @@ -2,11 +2,15 @@ import enum import uuid from typing import Any, Generic, TypeVar -from sqlalchemy import CHAR, VARCHAR, TypeDecorator -from sqlalchemy.dialects.postgresql import UUID +import sqlalchemy as sa +from sqlalchemy import CHAR, TEXT, VARCHAR, LargeBinary, TypeDecorator +from sqlalchemy.dialects.mysql import LONGBLOB, LONGTEXT +from sqlalchemy.dialects.postgresql import BYTEA, JSONB, UUID from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.sql.type_api import TypeEngine +from configs import dify_config + class StringUUID(TypeDecorator[uuid.UUID | str | None]): impl = CHAR @@ -34,6 +38,78 @@ class StringUUID(TypeDecorator[uuid.UUID | str | None]): return str(value) +class LongText(TypeDecorator[str | None]): + impl = TEXT + cache_ok = True + + def process_bind_param(self, value: str | None, dialect: Dialect) -> str | None: + if value is None: + return value + return value + + def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: + if dialect.name == "postgresql": + return dialect.type_descriptor(TEXT()) + elif dialect.name == "mysql": + return dialect.type_descriptor(LONGTEXT()) + else: + return dialect.type_descriptor(TEXT()) + + def process_result_value(self, value: str | None, dialect: Dialect) -> str | None: + if value is None: + return value + return value + + +class BinaryData(TypeDecorator[bytes | None]): + impl = LargeBinary + cache_ok = True + + def process_bind_param(self, value: bytes | None, dialect: Dialect) -> bytes | None: + if value is None: + return value + return value + + def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: + if dialect.name == "postgresql": + return dialect.type_descriptor(BYTEA()) + elif dialect.name == "mysql": + return dialect.type_descriptor(LONGBLOB()) + else: + return dialect.type_descriptor(LargeBinary()) + + def process_result_value(self, value: bytes | None, dialect: Dialect) -> bytes | None: + if value is None: + return value + return value + + +class AdjustedJSON(TypeDecorator[dict | list | None]): + impl = sa.JSON + cache_ok = True + + def __init__(self, astext_type=None): + self.astext_type = astext_type + super().__init__() + + def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: + if dialect.name == "postgresql": + if self.astext_type: + return dialect.type_descriptor(JSONB(astext_type=self.astext_type)) + else: + return dialect.type_descriptor(JSONB()) + elif dialect.name == "mysql": + return dialect.type_descriptor(sa.JSON()) + else: + return dialect.type_descriptor(sa.JSON()) + + def process_bind_param(self, value: dict | list | None, dialect: Dialect) -> dict | list | None: + 
return value + + def process_result_value(self, value: dict | list | None, dialect: Dialect) -> dict | list | None: + return value + + _E = TypeVar("_E", bound=enum.StrEnum) @@ -77,3 +153,11 @@ class EnumText(TypeDecorator[_E | None], Generic[_E]): if x is None or y is None: return x is y return x == y + + +def adjusted_json_index(index_name, column_name): + index_name = index_name or f"{column_name}_idx" + if dify_config.DB_TYPE == "postgresql": + return sa.Index(index_name, column_name, postgresql_using="gin") + else: + return None diff --git a/api/models/web.py b/api/models/web.py index 7df5bd6e87..6e1a90af87 100644 --- a/api/models/web.py +++ b/api/models/web.py @@ -1,4 +1,5 @@ from datetime import datetime +from uuid import uuid4 import sqlalchemy as sa from sqlalchemy import DateTime, String, func @@ -18,12 +19,10 @@ class SavedMessage(TypeBase): sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - created_by_role: Mapped[str] = mapped_column( - String(255), nullable=False, server_default=sa.text("'end_user'::character varying") - ) + created_by_role: Mapped[str] = mapped_column(String(255), nullable=False, server_default=sa.text("'end_user'")) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column( DateTime, @@ -44,13 +43,13 @@ class PinnedConversation(TypeBase): sa.Index("pinned_conversation_conversation_idx", "app_id", "conversation_id", "created_by_role", "created_by"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) conversation_id: Mapped[str] = mapped_column(StringUUID) created_by_role: Mapped[str] = mapped_column( String(255), nullable=False, - server_default=sa.text("'end_user'::character varying"), + server_default=sa.text("'end_user'"), ) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column( diff --git a/api/models/workflow.py b/api/models/workflow.py index 4eff16dda2..53067744ed 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -17,7 +17,7 @@ from core.workflow.constants import ( CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID, ) -from core.workflow.enums import NodeType, WorkflowExecutionStatus +from core.workflow.enums import NodeType from extensions.ext_storage import Storage from factories.variable_factory import TypeMismatchError, build_segment_with_type from libs.datetime_utils import naive_utc_now @@ -41,7 +41,7 @@ from .account import Account from .base import Base, DefaultFieldsMixin from .engine import db from .enums import CreatorUserRole, DraftVariableType, ExecutionOffLoadType -from .types import EnumText, StringUUID +from .types import EnumText, LongText, StringUUID logger = logging.getLogger(__name__) @@ -125,15 +125,15 @@ class Workflow(Base): sa.Index("workflow_version_idx", "tenant_id", "app_id", "version"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = 
mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(String(255), nullable=False) version: Mapped[str] = mapped_column(String(255), nullable=False) - marked_name: Mapped[str] = mapped_column(default="", server_default="") - marked_comment: Mapped[str] = mapped_column(default="", server_default="") - graph: Mapped[str] = mapped_column(sa.Text) - _features: Mapped[str] = mapped_column("features", sa.TEXT) + marked_name: Mapped[str] = mapped_column(String(255), default="", server_default="") + marked_comment: Mapped[str] = mapped_column(String(255), default="", server_default="") + graph: Mapped[str] = mapped_column(LongText) + _features: Mapped[str] = mapped_column("features", LongText) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by: Mapped[str | None] = mapped_column(StringUUID) @@ -144,14 +144,12 @@ class Workflow(Base): server_default=func.current_timestamp(), onupdate=func.current_timestamp(), ) - _environment_variables: Mapped[str] = mapped_column( - "environment_variables", sa.Text, nullable=False, server_default="{}" - ) + _environment_variables: Mapped[str] = mapped_column("environment_variables", LongText, nullable=False, default="{}") _conversation_variables: Mapped[str] = mapped_column( - "conversation_variables", sa.Text, nullable=False, server_default="{}" + "conversation_variables", LongText, nullable=False, default="{}" ) _rag_pipeline_variables: Mapped[str] = mapped_column( - "rag_pipeline_variables", sa.Text, nullable=False, server_default="{}" + "rag_pipeline_variables", LongText, nullable=False, default="{}" ) VERSION_DRAFT = "draft" @@ -588,7 +586,7 @@ class WorkflowRun(Base): sa.Index("workflow_run_triggerd_from_idx", "tenant_id", "app_id", "triggered_from"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID) app_id: Mapped[str] = mapped_column(StringUUID) @@ -596,14 +594,11 @@ class WorkflowRun(Base): type: Mapped[str] = mapped_column(String(255)) triggered_from: Mapped[str] = mapped_column(String(255)) version: Mapped[str] = mapped_column(String(255)) - graph: Mapped[str | None] = mapped_column(sa.Text) - inputs: Mapped[str | None] = mapped_column(sa.Text) - status: Mapped[str] = mapped_column( - EnumText(WorkflowExecutionStatus, length=255), - nullable=False, - ) - outputs: Mapped[str | None] = mapped_column(sa.Text, default="{}") - error: Mapped[str | None] = mapped_column(sa.Text) + graph: Mapped[str | None] = mapped_column(LongText) + inputs: Mapped[str | None] = mapped_column(LongText) + status: Mapped[str] = mapped_column(String(255)) # running, succeeded, failed, stopped, partial-succeeded + outputs: Mapped[str | None] = mapped_column(LongText, default="{}") + error: Mapped[str | None] = mapped_column(LongText) elapsed_time: Mapped[float] = mapped_column(sa.Float, nullable=False, server_default=sa.text("0")) total_tokens: Mapped[int] = mapped_column(sa.BigInteger, server_default=sa.text("0")) total_steps: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True) @@ -811,7 +806,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo ), 
) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID) app_id: Mapped[str] = mapped_column(StringUUID) workflow_id: Mapped[str] = mapped_column(StringUUID) @@ -823,13 +818,13 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo node_id: Mapped[str] = mapped_column(String(255)) node_type: Mapped[str] = mapped_column(String(255)) title: Mapped[str] = mapped_column(String(255)) - inputs: Mapped[str | None] = mapped_column(sa.Text) - process_data: Mapped[str | None] = mapped_column(sa.Text) - outputs: Mapped[str | None] = mapped_column(sa.Text) + inputs: Mapped[str | None] = mapped_column(LongText) + process_data: Mapped[str | None] = mapped_column(LongText) + outputs: Mapped[str | None] = mapped_column(LongText) status: Mapped[str] = mapped_column(String(255)) - error: Mapped[str | None] = mapped_column(sa.Text) + error: Mapped[str | None] = mapped_column(LongText) elapsed_time: Mapped[float] = mapped_column(sa.Float, server_default=sa.text("0")) - execution_metadata: Mapped[str | None] = mapped_column(sa.Text) + execution_metadata: Mapped[str | None] = mapped_column(LongText) created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp()) created_by_role: Mapped[str] = mapped_column(String(255)) created_by: Mapped[str] = mapped_column(StringUUID) @@ -986,7 +981,7 @@ class WorkflowNodeExecutionOffload(Base): id: Mapped[str] = mapped_column( StringUUID, primary_key=True, - server_default=sa.text("uuidv7()"), + default=lambda: str(uuid4()), ) created_at: Mapped[datetime] = mapped_column( @@ -1095,7 +1090,7 @@ class WorkflowAppLog(Base): sa.Index("workflow_app_log_workflow_run_id_idx", "workflow_run_id"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) tenant_id: Mapped[str] = mapped_column(StringUUID) app_id: Mapped[str] = mapped_column(StringUUID) workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -1150,7 +1145,7 @@ class ConversationVariable(Base): id: Mapped[str] = mapped_column(StringUUID, primary_key=True) conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False, primary_key=True, index=True) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False, index=True) - data: Mapped[str] = mapped_column(sa.Text, nullable=False) + data: Mapped[str] = mapped_column(LongText, nullable=False) created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), index=True ) @@ -1214,7 +1209,7 @@ class WorkflowDraftVariable(Base): __allow_unmapped__ = True # id is the unique identifier of a draft variable. - id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) created_at: Mapped[datetime] = mapped_column( DateTime, @@ -1280,7 +1275,7 @@ class WorkflowDraftVariable(Base): # The variable's value serialized as a JSON string # # If the variable is offloaded, `value` contains a truncated version, not the full original value. 
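The replacements in this hunk and the model hunks above all follow one pattern: PostgreSQL-only server defaults such as uuid_generate_v4() and uuidv7() become client-side Python defaults, so rows get their IDs the same way on PostgreSQL and MySQL. A minimal sketch of the pattern, assuming a plain SQLAlchemy 2.0 declarative base (the model and table names here are illustrative, not from this patch):

    import sqlalchemy as sa
    from uuid import uuid4
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Example(Base):
        __tablename__ = "examples"
        # The ID is generated in Python at INSERT time, so no uuid-ossp
        # extension or custom uuidv7() SQL function is needed server-side.
        id: Mapped[str] = mapped_column(sa.String(36), primary_key=True, default=lambda: str(uuid4()))
        payload: Mapped[str] = mapped_column(sa.Text, nullable=False)

One visible consequence later in this patch: call sites such as WorkflowToolManageService stop passing id= explicitly and rely on the model default instead.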
- value: Mapped[str] = mapped_column(sa.Text, nullable=False, name="value") + value: Mapped[str] = mapped_column(LongText, nullable=False, name="value") # Controls whether the variable should be displayed in the variable inspection panel visible: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=True) @@ -1592,8 +1587,7 @@ class WorkflowDraftVariableFile(Base): id: Mapped[str] = mapped_column( StringUUID, primary_key=True, - default=uuidv7, - server_default=sa.text("uuidv7()"), + default=lambda: str(uuidv7()), ) created_at: Mapped[datetime] = mapped_column( diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index 0d52c56138..be7a521881 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -35,6 +35,7 @@ from core.workflow.entities.workflow_pause import WorkflowPauseEntity from core.workflow.enums import WorkflowExecutionStatus from extensions.ext_storage import storage from libs.datetime_utils import naive_utc_now +from libs.helper import convert_datetime_to_date from libs.infinite_scroll_pagination import InfiniteScrollPagination from libs.time_parser import get_time_threshold from libs.uuid_utils import uuidv7 @@ -599,8 +600,9 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): """ Get daily runs statistics using raw SQL for optimal performance. """ - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, COUNT(id) AS runs FROM workflow_runs @@ -646,8 +648,9 @@ WHERE """ Get daily terminals statistics using raw SQL for optimal performance. """ - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, COUNT(DISTINCT created_by) AS terminal_count FROM workflow_runs @@ -693,8 +696,9 @@ WHERE """ Get daily token cost statistics using raw SQL for optimal performance. 
""" - sql_query = """SELECT - DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + converted_created_at = convert_datetime_to_date("created_at") + sql_query = f"""SELECT + {converted_created_at} AS date, SUM(total_tokens) AS token_count FROM workflow_runs @@ -751,7 +755,7 @@ WHERE FROM ( SELECT - DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, + {convert_datetime_to_date("c.created_at")} AS date, c.created_by, COUNT(c.id) AS interactions FROM diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index b1cc963681..5413725798 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -14,7 +14,6 @@ from core.tools.utils.workflow_configuration_sync import WorkflowToolConfigurati from core.tools.workflow_as_tool.provider import WorkflowToolProviderController from core.tools.workflow_as_tool.tool import WorkflowTool from extensions.ext_database import db -from libs.uuid_utils import uuidv7 from models.model import App from models.tools import WorkflowToolProvider from models.workflow import Workflow @@ -67,7 +66,6 @@ class WorkflowToolManageService: with Session(db.engine, expire_on_commit=False) as session, session.begin(): workflow_tool_provider = WorkflowToolProvider( - id=str(uuidv7()), tenant_id=tenant_id, user_id=user_id, app_id=workflow_app_id, diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index c5d1f6ab13..f299ce3baa 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -7,7 +7,8 @@ from enum import StrEnum from typing import Any, ClassVar from sqlalchemy import Engine, orm, select -from sqlalchemy.dialects.postgresql import insert +from sqlalchemy.dialects.mysql import insert as mysql_insert +from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.sql.expression import and_, or_ @@ -627,28 +628,51 @@ def _batch_upsert_draft_variable( # # For these reasons, we use the SQLAlchemy query builder and rely on dialect-specific # insert operations instead of the ORM layer. - stmt = insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_vars]) - if policy == _UpsertPolicy.OVERWRITE: - stmt = stmt.on_conflict_do_update( - index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(), - set_={ + + # Use different insert statements based on database type + if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql": + stmt = pg_insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_vars]) + if policy == _UpsertPolicy.OVERWRITE: + stmt = stmt.on_conflict_do_update( + index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(), + set_={ + # Refresh creation timestamp to ensure updated variables + # appear first in chronologically sorted result sets. 
+ "created_at": stmt.excluded.created_at, + "updated_at": stmt.excluded.updated_at, + "last_edited_at": stmt.excluded.last_edited_at, + "description": stmt.excluded.description, + "value_type": stmt.excluded.value_type, + "value": stmt.excluded.value, + "visible": stmt.excluded.visible, + "editable": stmt.excluded.editable, + "node_execution_id": stmt.excluded.node_execution_id, + "file_id": stmt.excluded.file_id, + }, + ) + elif policy == _UpsertPolicy.IGNORE: + stmt = stmt.on_conflict_do_nothing(index_elements=WorkflowDraftVariable.unique_app_id_node_id_name()) + else: + stmt = mysql_insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_vars]) # type: ignore[assignment] + if policy == _UpsertPolicy.OVERWRITE: + stmt = stmt.on_duplicate_key_update( # type: ignore[attr-defined] # Refresh creation timestamp to ensure updated variables # appear first in chronologically sorted result sets. - "created_at": stmt.excluded.created_at, - "updated_at": stmt.excluded.updated_at, - "last_edited_at": stmt.excluded.last_edited_at, - "description": stmt.excluded.description, - "value_type": stmt.excluded.value_type, - "value": stmt.excluded.value, - "visible": stmt.excluded.visible, - "editable": stmt.excluded.editable, - "node_execution_id": stmt.excluded.node_execution_id, - "file_id": stmt.excluded.file_id, - }, - ) - elif policy == _UpsertPolicy.IGNORE: - stmt = stmt.on_conflict_do_nothing(index_elements=WorkflowDraftVariable.unique_app_id_node_id_name()) - else: + created_at=stmt.inserted.created_at, # type: ignore[attr-defined] + updated_at=stmt.inserted.updated_at, # type: ignore[attr-defined] + last_edited_at=stmt.inserted.last_edited_at, # type: ignore[attr-defined] + description=stmt.inserted.description, # type: ignore[attr-defined] + value_type=stmt.inserted.value_type, # type: ignore[attr-defined] + value=stmt.inserted.value, # type: ignore[attr-defined] + visible=stmt.inserted.visible, # type: ignore[attr-defined] + editable=stmt.inserted.editable, # type: ignore[attr-defined] + node_execution_id=stmt.inserted.node_execution_id, # type: ignore[attr-defined] + file_id=stmt.inserted.file_id, # type: ignore[attr-defined] + ) + elif policy == _UpsertPolicy.IGNORE: + stmt = stmt.prefix_with("IGNORE") + + if policy not in [_UpsertPolicy.OVERWRITE, _UpsertPolicy.IGNORE]: raise Exception("Invalid value for update policy.") session.execute(stmt) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py index 9b86671954..fa13790942 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py @@ -6,7 +6,6 @@ from faker import Faker from core.tools.entities.api_entities import ToolProviderApiEntity from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ToolProviderType -from libs.uuid_utils import uuidv7 from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider from services.plugin.plugin_service import PluginService from services.tools.tools_transform_service import ToolTransformService @@ -67,7 +66,6 @@ class TestToolTransformService: ) elif provider_type == "workflow": provider = WorkflowToolProvider( - id=str(uuidv7()), name=fake.company(), description=fake.text(max_nb_chars=100), icon='{"background": 
"#FF6B6B", "content": "🔧"}', @@ -760,7 +758,6 @@ class TestToolTransformService: # Create workflow tool provider provider = WorkflowToolProvider( - id=str(uuidv7()), name=fake.company(), description=fake.text(max_nb_chars=100), icon='{"background": "#FF6B6B", "content": "🔧"}', diff --git a/docker/.env.example b/docker/.env.example index 5cb948d835..7e2e9aa26d 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -224,15 +224,20 @@ NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false # ------------------------------ # Database Configuration -# The database uses PostgreSQL. Please use the public schema. -# It is consistent with the configuration in the 'db' service below. +# The database uses PostgreSQL or MySQL. OceanBase and seekdb are also supported. Please use the public schema. +# It is consistent with the configuration in the database service below. +# You can adjust the database configuration according to your needs. # ------------------------------ +# Database type, supported values are `postgresql` and `mysql` +DB_TYPE=postgresql + DB_USERNAME=postgres DB_PASSWORD=difyai123456 -DB_HOST=db +DB_HOST=db_postgres DB_PORT=5432 DB_DATABASE=dify + # The size of the database connection pool. # The default is 30 connections, which can be appropriately increased. SQLALCHEMY_POOL_SIZE=30 @@ -294,6 +299,29 @@ POSTGRES_STATEMENT_TIMEOUT=0 # A value of 0 prevents the server from terminating idle sessions. POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0 +# MySQL Performance Configuration +# Maximum number of connections to MySQL +# +# Default is 1000 +MYSQL_MAX_CONNECTIONS=1000 + +# InnoDB buffer pool size +# Default is 512M +# Recommended value: 70-80% of available memory for dedicated MySQL server +# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_buffer_pool_size +MYSQL_INNODB_BUFFER_POOL_SIZE=512M + +# InnoDB log file size +# Default is 128M +# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_log_file_size +MYSQL_INNODB_LOG_FILE_SIZE=128M + +# InnoDB flush log at transaction commit +# Default is 2 (flush to OS cache, sync every second) +# Options: 0 (no flush), 1 (flush and sync), 2 (flush to OS cache) +# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_flush_log_at_trx_commit +MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT=2 + # ------------------------------ # Redis Configuration # This Redis configuration is used for caching and for pub/sub during conversation. @@ -488,7 +516,7 @@ SUPABASE_URL=your-server-url # ------------------------------ # The type of vector store to use. -# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`. +# Supported values are `weaviate`, `oceanbase`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`. 
VECTOR_STORE=weaviate # Prefix used to create collection name in vector database VECTOR_INDEX_NAME_PREFIX=Vector_index @@ -498,6 +526,23 @@ WEAVIATE_ENDPOINT=http://weaviate:8080 WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051 +# OceanBase metadata database configuration: available when `DB_TYPE` is `mysql` and `COMPOSE_PROFILES` includes `oceanbase`. +# OceanBase vector database configuration: available when `VECTOR_STORE` is `oceanbase`. +# To use OceanBase as both the vector database and the metadata database, set `DB_TYPE` to `mysql`, include `oceanbase` in `COMPOSE_PROFILES`, and keep the Database Configuration above identical to these vector database settings. +# seekdb is the lite version of OceanBase and shares the connection configuration with OceanBase. +OCEANBASE_VECTOR_HOST=oceanbase +OCEANBASE_VECTOR_PORT=2881 +OCEANBASE_VECTOR_USER=root@test +OCEANBASE_VECTOR_PASSWORD=difyai123456 +OCEANBASE_VECTOR_DATABASE=test +OCEANBASE_CLUSTER_NAME=difyai +OCEANBASE_MEMORY_LIMIT=6G +OCEANBASE_ENABLE_HYBRID_SEARCH=false +# For OceanBase vector database, built-in fulltext parsers are `ngram`, `beng`, `space`, `ngram2`, `ik` +# For OceanBase vector database, external fulltext parsers (require plugin installation) are `japanese_ftparser`, `thai_ftparser` +OCEANBASE_FULLTEXT_PARSER=ik +SEEKDB_MEMORY_LIMIT=2G + # The Qdrant endpoint URL. Only available when VECTOR_STORE is `qdrant`. QDRANT_URL=http://qdrant:6333 QDRANT_API_KEY=difyai123456 @@ -703,19 +748,6 @@ LINDORM_PASSWORD=admin LINDORM_USING_UGC=True LINDORM_QUERY_TIMEOUT=1 -# OceanBase Vector configuration, only available when VECTOR_STORE is `oceanbase` -# Built-in fulltext parsers are `ngram`, `beng`, `space`, `ngram2`, `ik` -# External fulltext parsers (require plugin installation) are `japanese_ftparser`, `thai_ftparser` -OCEANBASE_VECTOR_HOST=oceanbase -OCEANBASE_VECTOR_PORT=2881 -OCEANBASE_VECTOR_USER=root@test -OCEANBASE_VECTOR_PASSWORD=difyai123456 -OCEANBASE_VECTOR_DATABASE=test -OCEANBASE_CLUSTER_NAME=difyai -OCEANBASE_MEMORY_LIMIT=6G -OCEANBASE_ENABLE_HYBRID_SEARCH=false -OCEANBASE_FULLTEXT_PARSER=ik - # opengauss configurations, only available when VECTOR_STORE is `opengauss` OPENGAUSS_HOST=opengauss OPENGAUSS_PORT=6600 @@ -1039,7 +1071,7 @@ ALLOW_UNSAFE_DATA_SCHEME=false MAX_TREE_DEPTH=50 # ------------------------------ -# Environment Variables for db Service +# Environment Variables for database Service # ------------------------------ # The name of the default postgres user. @@ -1048,9 +1080,19 @@ POSTGRES_USER=${DB_USERNAME} POSTGRES_PASSWORD=${DB_PASSWORD} # The name of the default postgres database. POSTGRES_DB=${DB_DATABASE} -# postgres data directory +# Postgres data directory PGDATA=/var/lib/postgresql/data/pgdata +# MySQL Default Configuration +# The name of the default mysql user. +MYSQL_USERNAME=${DB_USERNAME} +# The password for the default mysql user. +MYSQL_PASSWORD=${DB_PASSWORD} +# The name of the default mysql database.
+MYSQL_DATABASE=${DB_DATABASE} +# MySQL data directory +MYSQL_HOST_VOLUME=./volumes/mysql/data + # ------------------------------ # Environment Variables for sandbox Service # ------------------------------ @@ -1210,12 +1252,12 @@ SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20 SSRF_POOL_KEEPALIVE_EXPIRY=5.0 # ------------------------------ -# docker env var for specifying vector db type at startup -# (based on the vector db type, the corresponding docker +# docker env var for specifying vector db and metadata db type at startup +# (based on the vector db and metadata db type, the corresponding docker # compose profile will be used) # if you want to use unstructured, add ',unstructured' to the end # ------------------------------ -COMPOSE_PROFILES=${VECTOR_STORE:-weaviate} +COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql} # ------------------------------ # Docker Compose Service Expose Host Port Configurations @@ -1383,4 +1425,4 @@ WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100 WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0 # Tenant isolated task queue configuration -TENANT_ISOLATED_TASK_CONCURRENCY=1 +TENANT_ISOLATED_TASK_CONCURRENCY=1 \ No newline at end of file diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index e01437689d..eb0733e414 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -17,8 +17,18 @@ services: PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false redis: condition: service_started volumes: @@ -44,8 +54,18 @@ services: PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false redis: condition: service_started volumes: @@ -66,8 +86,18 @@ services: # Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks. MODE: beat depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false redis: condition: service_started networks: @@ -101,11 +131,12 @@ services: ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true} ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true} ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true} - NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false} - - # The postgres database. - db: + + # The PostgreSQL database. 
+ db_postgres: image: postgres:15-alpine + profiles: + - postgresql restart: always environment: POSTGRES_USER: ${POSTGRES_USER:-postgres} @@ -128,16 +159,46 @@ services: "CMD", "pg_isready", "-h", - "db", + "db_postgres", "-U", "${PGUSER:-postgres}", "-d", - "${POSTGRES_DB:-dify}", + "${DB_DATABASE:-dify}", ] interval: 1s timeout: 3s retries: 60 + # The mysql database. + db_mysql: + image: mysql:8.0 + profiles: + - mysql + restart: always + environment: + MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456} + MYSQL_DATABASE: ${MYSQL_DATABASE:-dify} + command: > + --max_connections=1000 + --innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M} + --innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M} + --innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2} + volumes: + - ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql + healthcheck: + test: + [ + "CMD", + "mysqladmin", + "ping", + "-u", + "root", + "-p${MYSQL_PASSWORD:-difyai123456}", + ] + interval: 1s + timeout: 3s + retries: 30 + # The redis cache. redis: image: redis:6-alpine @@ -238,8 +299,18 @@ services: volumes: - ./volumes/plugin_daemon:/app/storage depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false # ssrf_proxy server # for more information, please refer to @@ -355,6 +426,63 @@ services: AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true} AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai} + # OceanBase vector database + oceanbase: + image: oceanbase/oceanbase-ce:4.3.5-lts + container_name: oceanbase + profiles: + - oceanbase + restart: always + volumes: + - ./volumes/oceanbase/data:/root/ob + - ./volumes/oceanbase/conf:/root/.obd/cluster + - ./volumes/oceanbase/init.d:/root/boot/init.d + environment: + OB_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} + OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} + OB_SERVER_IP: 127.0.0.1 + MODE: mini + LANG: en_US.UTF-8 + ports: + - "${OCEANBASE_VECTOR_PORT:-2881}:2881" + healthcheck: + test: + [ + "CMD-SHELL", + 'obclient -h127.0.0.1 -P2881 -uroot@test -p${OCEANBASE_VECTOR_PASSWORD:-difyai123456} -e "SELECT 1;"', + ] + interval: 10s + retries: 30 + start_period: 30s + timeout: 10s + + # seekdb vector database + seekdb: + image: oceanbase/seekdb:latest + container_name: seekdb + profiles: + - seekdb + restart: always + volumes: + - ./volumes/seekdb:/var/lib/oceanbase + environment: + ROOT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + MEMORY_LIMIT: ${SEEKDB_MEMORY_LIMIT:-2G} + REPORTER: dify-ai-seekdb + ports: + - "${OCEANBASE_VECTOR_PORT:-2881}:2881" + healthcheck: + test: + [ + "CMD-SHELL", + 'mysql -h127.0.0.1 -P2881 -uroot -p${OCEANBASE_VECTOR_PASSWORD:-difyai123456} -e "SELECT 1;"', + ] + interval: 5s + retries: 60 + timeout: 5s + # Qdrant vector store. # (if used, you need to set VECTOR_STORE to qdrant in the api & worker service.) 
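The database services above all gate dependent containers on a healthcheck: pg_isready for db_postgres, mysqladmin ping for db_mysql, and a SELECT 1 through obclient or mysql for OceanBase and seekdb. A rough Python equivalent of the MySQL probe, assuming the pymysql package is available (for illustration only, not part of this patch):

    import pymysql

    def mysql_ready(host: str = "127.0.0.1", port: int = 3306, password: str = "difyai123456") -> bool:
        # Mirrors "mysqladmin ping" followed by a trivial query.
        try:
            conn = pymysql.connect(host=host, port=port, user="root", password=password)
        except pymysql.MySQLError:
            return False
        try:
            with conn.cursor() as cur:
                cur.execute("SELECT 1")
                return cur.fetchone() == (1,)
        finally:
            conn.close()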
qdrant: @@ -490,38 +618,6 @@ services: CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider} IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE} - # OceanBase vector database - oceanbase: - image: oceanbase/oceanbase-ce:4.3.5-lts - container_name: oceanbase - profiles: - - oceanbase - restart: always - volumes: - - ./volumes/oceanbase/data:/root/ob - - ./volumes/oceanbase/conf:/root/.obd/cluster - - ./volumes/oceanbase/init.d:/root/boot/init.d - environment: - OB_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} - OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} - OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} - OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} - OB_SERVER_IP: 127.0.0.1 - MODE: mini - LANG: en_US.UTF-8 - ports: - - "${OCEANBASE_VECTOR_PORT:-2881}:2881" - healthcheck: - test: - [ - "CMD-SHELL", - 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"', - ] - interval: 10s - retries: 30 - start_period: 30s - timeout: 10s - # Oracle vector database oracle: image: container-registry.oracle.com/database/free:latest diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index b93457f8dc..b409e3d26d 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -1,7 +1,10 @@ services: # The postgres database. - db: + db_postgres: image: postgres:15-alpine + profiles: + - "" + - postgresql restart: always env_file: - ./middleware.env @@ -27,7 +30,7 @@ services: "CMD", "pg_isready", "-h", - "db", + "db_postgres", "-U", "${PGUSER:-postgres}", "-d", @@ -37,6 +40,39 @@ services: timeout: 3s retries: 30 + db_mysql: + image: mysql:8.0 + profiles: + - mysql + restart: always + env_file: + - ./middleware.env + environment: + MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456} + MYSQL_DATABASE: ${MYSQL_DATABASE:-dify} + command: > + --max_connections=1000 + --innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M} + --innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M} + --innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2} + volumes: + - ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql + ports: + - "${EXPOSE_MYSQL_PORT:-3306}:3306" + healthcheck: + test: + [ + "CMD", + "mysqladmin", + "ping", + "-u", + "root", + "-p${MYSQL_PASSWORD:-difyai123456}", + ] + interval: 1s + timeout: 3s + retries: 30 + # The redis cache. redis: image: redis:6-alpine @@ -93,10 +129,6 @@ services: - ./middleware.env environment: # Use the shared environment variables. 
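      # The hardcoded Postgres connection values below are removed so that
      # plugin_daemon inherits DB_HOST/DB_PORT/DB_USERNAME/DB_PASSWORD from
      # middleware.env and follows whichever DB_TYPE is selected there; only
      # DB_DATABASE is still overridden to point at the plugin database.
      # For example (assumed values, not part of this patch):
      #
      #   # middleware.env
      #   DB_TYPE=mysql
      #   DB_HOST=db_mysql
      #   DB_PORT=3306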
- DB_HOST: ${DB_HOST:-db} - DB_PORT: ${DB_PORT:-5432} - DB_USERNAME: ${DB_USER:-postgres} - DB_PASSWORD: ${DB_PASSWORD:-difyai123456} DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin} REDIS_HOST: ${REDIS_HOST:-redis} REDIS_PORT: ${REDIS_PORT:-6379} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 0117ebce3f..d1e970719c 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -53,9 +53,10 @@ x-shared-env: &shared-api-worker-env ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true} ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true} NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false} + DB_TYPE: ${DB_TYPE:-postgresql} DB_USERNAME: ${DB_USERNAME:-postgres} DB_PASSWORD: ${DB_PASSWORD:-difyai123456} - DB_HOST: ${DB_HOST:-db} + DB_HOST: ${DB_HOST:-db_postgres} DB_PORT: ${DB_PORT:-5432} DB_DATABASE: ${DB_DATABASE:-dify} SQLALCHEMY_POOL_SIZE: ${SQLALCHEMY_POOL_SIZE:-30} @@ -72,6 +73,10 @@ x-shared-env: &shared-api-worker-env POSTGRES_EFFECTIVE_CACHE_SIZE: ${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB} POSTGRES_STATEMENT_TIMEOUT: ${POSTGRES_STATEMENT_TIMEOUT:-0} POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: ${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0} + MYSQL_MAX_CONNECTIONS: ${MYSQL_MAX_CONNECTIONS:-1000} + MYSQL_INNODB_BUFFER_POOL_SIZE: ${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M} + MYSQL_INNODB_LOG_FILE_SIZE: ${MYSQL_INNODB_LOG_FILE_SIZE:-128M} + MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT: ${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2} REDIS_HOST: ${REDIS_HOST:-redis} REDIS_PORT: ${REDIS_PORT:-6379} REDIS_USERNAME: ${REDIS_USERNAME:-} @@ -159,6 +164,16 @@ x-shared-env: &shared-api-worker-env WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT:-http://weaviate:8080} WEAVIATE_API_KEY: ${WEAVIATE_API_KEY:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih} WEAVIATE_GRPC_ENDPOINT: ${WEAVIATE_GRPC_ENDPOINT:-grpc://weaviate:50051} + OCEANBASE_VECTOR_HOST: ${OCEANBASE_VECTOR_HOST:-oceanbase} + OCEANBASE_VECTOR_PORT: ${OCEANBASE_VECTOR_PORT:-2881} + OCEANBASE_VECTOR_USER: ${OCEANBASE_VECTOR_USER:-root@test} + OCEANBASE_VECTOR_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + OCEANBASE_VECTOR_DATABASE: ${OCEANBASE_VECTOR_DATABASE:-test} + OCEANBASE_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} + OCEANBASE_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} + OCEANBASE_ENABLE_HYBRID_SEARCH: ${OCEANBASE_ENABLE_HYBRID_SEARCH:-false} + OCEANBASE_FULLTEXT_PARSER: ${OCEANBASE_FULLTEXT_PARSER:-ik} + SEEKDB_MEMORY_LIMIT: ${SEEKDB_MEMORY_LIMIT:-2G} QDRANT_URL: ${QDRANT_URL:-http://qdrant:6333} QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456} QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20} @@ -314,15 +329,6 @@ x-shared-env: &shared-api-worker-env LINDORM_PASSWORD: ${LINDORM_PASSWORD:-admin} LINDORM_USING_UGC: ${LINDORM_USING_UGC:-True} LINDORM_QUERY_TIMEOUT: ${LINDORM_QUERY_TIMEOUT:-1} - OCEANBASE_VECTOR_HOST: ${OCEANBASE_VECTOR_HOST:-oceanbase} - OCEANBASE_VECTOR_PORT: ${OCEANBASE_VECTOR_PORT:-2881} - OCEANBASE_VECTOR_USER: ${OCEANBASE_VECTOR_USER:-root@test} - OCEANBASE_VECTOR_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} - OCEANBASE_VECTOR_DATABASE: ${OCEANBASE_VECTOR_DATABASE:-test} - OCEANBASE_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} - OCEANBASE_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} - OCEANBASE_ENABLE_HYBRID_SEARCH: ${OCEANBASE_ENABLE_HYBRID_SEARCH:-false} - OCEANBASE_FULLTEXT_PARSER: ${OCEANBASE_FULLTEXT_PARSER:-ik} OPENGAUSS_HOST: ${OPENGAUSS_HOST:-opengauss} OPENGAUSS_PORT: ${OPENGAUSS_PORT:-6600} OPENGAUSS_USER: 
${OPENGAUSS_USER:-postgres} @@ -451,6 +457,10 @@ x-shared-env: &shared-api-worker-env POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}} POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}} PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata} + MYSQL_USERNAME: ${MYSQL_USERNAME:-${DB_USERNAME}} + MYSQL_PASSWORD: ${MYSQL_PASSWORD:-${DB_PASSWORD}} + MYSQL_DATABASE: ${MYSQL_DATABASE:-${DB_DATABASE}} + MYSQL_HOST_VOLUME: ${MYSQL_HOST_VOLUME:-./volumes/mysql/data} SANDBOX_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox} SANDBOX_GIN_MODE: ${SANDBOX_GIN_MODE:-release} SANDBOX_WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15} @@ -640,8 +650,18 @@ services: PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false redis: condition: service_started volumes: @@ -667,8 +687,18 @@ services: PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false redis: condition: service_started volumes: @@ -689,8 +719,18 @@ services: # Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks. MODE: beat depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false redis: condition: service_started networks: @@ -724,11 +764,12 @@ services: ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true} ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true} ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true} - NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false} - - # The postgres database. - db: + + # The PostgreSQL database. + db_postgres: image: postgres:15-alpine + profiles: + - postgresql restart: always environment: POSTGRES_USER: ${POSTGRES_USER:-postgres} @@ -751,16 +792,46 @@ services: "CMD", "pg_isready", "-h", - "db", + "db_postgres", "-U", "${PGUSER:-postgres}", "-d", - "${POSTGRES_DB:-dify}", + "${DB_DATABASE:-dify}", ] interval: 1s timeout: 3s retries: 60 + # The mysql database. + db_mysql: + image: mysql:8.0 + profiles: + - mysql + restart: always + environment: + MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:-difyai123456} + MYSQL_DATABASE: ${MYSQL_DATABASE:-dify} + command: > + --max_connections=1000 + --innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M} + --innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M} + --innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2} + volumes: + - ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql + healthcheck: + test: + [ + "CMD", + "mysqladmin", + "ping", + "-u", + "root", + "-p${MYSQL_PASSWORD:-difyai123456}", + ] + interval: 1s + timeout: 3s + retries: 30 + # The redis cache. 
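  # The MYSQL_* settings exported above fall back to the generic DB_* values
  # through nested substitution (e.g. ${MYSQL_PASSWORD:-${DB_PASSWORD}}), so a
  # deployment that only fills in DB_USERNAME/DB_PASSWORD/DB_DATABASE drives
  # both engines from a single set of credentials. Note that the mysqld
  # command line in db_mysql pins --max_connections=1000 directly, while the
  # MYSQL_MAX_CONNECTIONS tunable is only exported into the shared env. To
  # inspect what Compose actually resolves for the mysql profile:
  #
  #   docker compose --profile mysql config | grep MYSQL_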
redis: image: redis:6-alpine @@ -861,8 +932,18 @@ services: volumes: - ./volumes/plugin_daemon:/app/storage depends_on: - db: + db_postgres: condition: service_healthy + required: false + db_mysql: + condition: service_healthy + required: false + oceanbase: + condition: service_healthy + required: false + seekdb: + condition: service_healthy + required: false # ssrf_proxy server # for more information, please refer to @@ -978,6 +1059,63 @@ services: AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true} AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai} + # OceanBase vector database + oceanbase: + image: oceanbase/oceanbase-ce:4.3.5-lts + container_name: oceanbase + profiles: + - oceanbase + restart: always + volumes: + - ./volumes/oceanbase/data:/root/ob + - ./volumes/oceanbase/conf:/root/.obd/cluster + - ./volumes/oceanbase/init.d:/root/boot/init.d + environment: + OB_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} + OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} + OB_SERVER_IP: 127.0.0.1 + MODE: mini + LANG: en_US.UTF-8 + ports: + - "${OCEANBASE_VECTOR_PORT:-2881}:2881" + healthcheck: + test: + [ + "CMD-SHELL", + 'obclient -h127.0.0.1 -P2881 -uroot@test -p${OCEANBASE_VECTOR_PASSWORD:-difyai123456} -e "SELECT 1;"', + ] + interval: 10s + retries: 30 + start_period: 30s + timeout: 10s + + # seekdb vector database + seekdb: + image: oceanbase/seekdb:latest + container_name: seekdb + profiles: + - seekdb + restart: always + volumes: + - ./volumes/seekdb:/var/lib/oceanbase + environment: + ROOT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} + MEMORY_LIMIT: ${SEEKDB_MEMORY_LIMIT:-2G} + REPORTER: dify-ai-seekdb + ports: + - "${OCEANBASE_VECTOR_PORT:-2881}:2881" + healthcheck: + test: + [ + "CMD-SHELL", + 'mysql -h127.0.0.1 -P2881 -uroot -p${OCEANBASE_VECTOR_PASSWORD:-difyai123456} -e "SELECT 1;"', + ] + interval: 5s + retries: 60 + timeout: 5s + # Qdrant vector store. # (if used, you need to set VECTOR_STORE to qdrant in the api & worker service.) 
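  # oceanbase and seekdb both publish host port ${OCEANBASE_VECTOR_PORT:-2881},
  # so the two profiles are mutually exclusive in practice: enable one or the
  # other, not both, e.g.
  #
  #   COMPOSE_PROFILES=oceanbase docker compose up -d
  #   COMPOSE_PROFILES=seekdb docker compose up -d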
qdrant: @@ -1113,38 +1251,6 @@ services: CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider} IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE} - # OceanBase vector database - oceanbase: - image: oceanbase/oceanbase-ce:4.3.5-lts - container_name: oceanbase - profiles: - - oceanbase - restart: always - volumes: - - ./volumes/oceanbase/data:/root/ob - - ./volumes/oceanbase/conf:/root/.obd/cluster - - ./volumes/oceanbase/init.d:/root/boot/init.d - environment: - OB_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G} - OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} - OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} - OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} - OB_SERVER_IP: 127.0.0.1 - MODE: mini - LANG: en_US.UTF-8 - ports: - - "${OCEANBASE_VECTOR_PORT:-2881}:2881" - healthcheck: - test: - [ - "CMD-SHELL", - 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"', - ] - interval: 10s - retries: 30 - start_period: 30s - timeout: 10s - # Oracle vector database oracle: image: container-registry.oracle.com/database/free:latest diff --git a/docker/middleware.env.example b/docker/middleware.env.example index 24629c2d89..3374ddd537 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -1,11 +1,21 @@ # ------------------------------ # Environment Variables for db Service # ------------------------------ -POSTGRES_USER=postgres +# Database Configuration +# Database type, supported values are `postgresql` and `mysql` +DB_TYPE=postgresql +DB_USERNAME=postgres +DB_PASSWORD=difyai123456 +DB_HOST=db_postgres +DB_PORT=5432 +DB_DATABASE=dify + +# PostgreSQL Configuration +POSTGRES_USER=${DB_USERNAME} # The password for the default postgres user. -POSTGRES_PASSWORD=difyai123456 +POSTGRES_PASSWORD=${DB_PASSWORD} # The name of the default postgres database. -POSTGRES_DB=dify +POSTGRES_DB=${DB_DATABASE} # postgres data directory PGDATA=/var/lib/postgresql/data/pgdata PGDATA_HOST_VOLUME=./volumes/db/data @@ -54,6 +64,37 @@ POSTGRES_STATEMENT_TIMEOUT=0 # A value of 0 prevents the server from terminating idle sessions. 
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0 +# MySQL Configuration +MYSQL_USERNAME=${DB_USERNAME} +# MySQL password +MYSQL_PASSWORD=${DB_PASSWORD} +# MySQL database name +MYSQL_DATABASE=${DB_DATABASE} +# MySQL data directory host volume +MYSQL_HOST_VOLUME=./volumes/mysql/data + +# MySQL Performance Configuration +# Maximum number of connections to MySQL +# Default is 1000 +MYSQL_MAX_CONNECTIONS=1000 + +# InnoDB buffer pool size +# Default is 512M +# Recommended value: 70-80% of available memory for dedicated MySQL server +# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_buffer_pool_size +MYSQL_INNODB_BUFFER_POOL_SIZE=512M + +# InnoDB log file size +# Default is 128M +# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_log_file_size +MYSQL_INNODB_LOG_FILE_SIZE=128M + +# InnoDB flush log at transaction commit +# Default is 2 (flush to OS cache, sync every second) +# Options: 0 (no flush), 1 (flush and sync), 2 (flush to OS cache) +# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_flush_log_at_trx_commit +MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT=2 + # ----------------------------- # Environment Variables for redis Service # ----------------------------- @@ -97,6 +138,7 @@ WEAVIATE_HOST_VOLUME=./volumes/weaviate # Docker Compose Service Expose Host Port Configurations # ------------------------------ EXPOSE_POSTGRES_PORT=5432 +EXPOSE_MYSQL_PORT=3306 EXPOSE_REDIS_PORT=6379 EXPOSE_SANDBOX_PORT=8194 EXPOSE_SSRF_PROXY_PORT=3128
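# The DB_* block above is what the api service assembles into its SQLAlchemy
# connection URL. A minimal sketch of that mapping, assuming standard
# SQLAlchemy driver names (psycopg2 for PostgreSQL, pymysql for MySQL); the
# helper below is hypothetical, not Dify's actual code:
#
#   import os
#
#   def build_db_uri() -> str:
#       db_type = os.getenv("DB_TYPE", "postgresql")
#       scheme = "postgresql+psycopg2" if db_type == "postgresql" else "mysql+pymysql"
#       user = os.getenv("DB_USERNAME", "postgres")
#       password = os.getenv("DB_PASSWORD", "difyai123456")
#       host = os.getenv("DB_HOST", "db_postgres")
#       port = os.getenv("DB_PORT", "5432")
#       database = os.getenv("DB_DATABASE", "dify")
#       return f"{scheme}://{user}:{password}@{host}:{port}/{database}"
#
#   # e.g. DB_TYPE=mysql DB_HOST=db_mysql DB_PORT=3306 resolves to
#   # mysql+pymysql://postgres:difyai123456@db_mysql:3306/dify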