Merge branch 'feat/rag-2' into fix/response-ts-error

This commit is contained in:
Wu Tianwei 2025-09-17 16:07:36 +08:00 committed by GitHub
commit da6b04656d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
22 changed files with 223 additions and 944 deletions

View File

@ -3,9 +3,9 @@ from threading import Lock
from typing import TYPE_CHECKING
from contexts.wrapper import RecyclableContextVar
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
if TYPE_CHECKING:
from core.datasource.__base.datasource_provider import DatasourcePluginProviderController
from core.model_runtime.entities.model_entities import AIModelEntity
from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
from core.tools.plugin_tool.provider import PluginToolProviderController

View File

@ -1,113 +0,0 @@
"""add_pipeline_info
Revision ID: b35c3db83d09
Revises: c20211f18133
Create Date: 2025-05-15 15:58:05.179877
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'b35c3db83d09'
down_revision = 'c20211f18133'
branch_labels = None
depends_on = None
def upgrade():
    """Initial RAG-pipeline schema.

    Creates the pipeline template tables, the pipelines table, a builtin
    datasource provider table, and adds pipeline-related columns to the
    existing ``datasets`` and ``workflows`` tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Built-in (shipped) pipeline templates.
    op.create_table('pipeline_built_in_templates',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('icon', sa.JSON(), nullable=False),
        sa.Column('copyright', sa.String(length=255), nullable=False),
        sa.Column('privacy_policy', sa.String(length=255), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('install_count', sa.Integer(), nullable=False),
        sa.Column('language', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
    )
    # Tenant-created pipeline templates.
    op.create_table('pipeline_customized_templates',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('icon', sa.JSON(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('install_count', sa.Integer(), nullable=False),
        sa.Column('language', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
    )
    # Lookup of customized templates by tenant.
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)
    # The pipelines themselves; workflow_id links to the backing workflow.
    op.create_table('pipelines',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
        sa.Column('mode', sa.String(length=255), nullable=False),
        sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
        sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_by', models.types.StringUUID(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
    )
    # Per-tenant credentials for builtin datasource providers.
    # NOTE: uses uuid_generate_v4() while the tables above use uuidv7().
    op.create_table('tool_builtin_datasource_providers',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
        sa.Column('user_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider', sa.String(length=256), nullable=False),
        sa.Column('encrypted_credentials', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='tool_builtin_datasource_provider_pkey'),
        sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_datasource_provider')
    )
    # Pipeline-related columns on datasets; all nullable/defaulted so existing
    # rows remain valid.
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
        batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
        batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
        batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
        batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
    # Serialized pipeline variable definitions; default '{}' keeps NOT NULL
    # satisfied for existing workflow rows.
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): drop added columns, indexes and tables in reverse order."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.drop_column('rag_pipeline_variables')
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.drop_column('chunk_structure')
        batch_op.drop_column('pipeline_id')
        batch_op.drop_column('runtime_mode')
        batch_op.drop_column('icon_info')
        batch_op.drop_column('keyword_number')
    op.drop_table('tool_builtin_datasource_providers')
    op.drop_table('pipelines')
    # Index dropped explicitly before its table.
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.drop_index('pipeline_customized_template_tenant_idx')
    op.drop_table('pipeline_customized_templates')
    op.drop_table('pipeline_built_in_templates')
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""add_pipeline_info_2
Revision ID: abb18a379e62
Revises: b35c3db83d09
Create Date: 2025-05-16 16:59:16.423127
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'abb18a379e62'
down_revision = 'b35c3db83d09'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the unused 'mode' column from the pipelines table."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipelines', schema=None) as batch_op:
        batch_op.drop_column('mode')
    # ### end Alembic commands ###
def downgrade():
    """Re-add the 'mode' column dropped in upgrade().

    NOTE(review): re-adding as NOT NULL without a server default will fail if
    the pipelines table contains rows — acceptable only for empty tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipelines', schema=None) as batch_op:
        batch_op.add_column(sa.Column('mode', sa.VARCHAR(length=255), autoincrement=False, nullable=False))
    # ### end Alembic commands ###

View File

@ -1,70 +0,0 @@
"""add_pipeline_info_3
Revision ID: c459994abfa8
Revises: abb18a379e62
Create Date: 2025-05-30 00:33:14.068312
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'c459994abfa8'
down_revision = 'abb18a379e62'
branch_labels = None
depends_on = None
def upgrade():
    """Add datasource OAuth/provider tables and switch pipeline templates to inline YAML."""
    # ### commands auto generated by Alembic - please adjust! ###
    # System-level OAuth parameters per (plugin, provider).
    op.create_table('datasource_oauth_params',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('plugin_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
        sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
    )
    # Per-tenant datasource provider credentials.
    op.create_table('datasource_providers',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('plugin_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('auth_type', sa.String(length=255), nullable=False),
        sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
        sa.UniqueConstraint('plugin_id', 'provider', name='datasource_provider_plugin_id_provider_idx')
    )
    # Templates now embed their pipeline as YAML instead of referencing a
    # pipeline row; added NOT NULL (fails on non-empty tables — assumes empty).
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=False))
        batch_op.add_column(sa.Column('yaml_content', sa.Text(), nullable=False))
        batch_op.drop_column('pipeline_id')
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=False))
        batch_op.add_column(sa.Column('yaml_content', sa.Text(), nullable=False))
        batch_op.drop_column('pipeline_id')
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): restore pipeline_id on templates, drop datasource tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('pipeline_id', sa.UUID(), autoincrement=False, nullable=False))
        batch_op.drop_column('yaml_content')
        batch_op.drop_column('chunk_structure')
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('pipeline_id', sa.UUID(), autoincrement=False, nullable=False))
        batch_op.drop_column('yaml_content')
        batch_op.drop_column('chunk_structure')
    op.drop_table('datasource_providers')
    op.drop_table('datasource_oauth_params')
    # ### end Alembic commands ###

View File

@ -1,37 +0,0 @@
"""add_pipeline_info_4
Revision ID: e4fb49a4fe86
Revises: c459994abfa8
Create Date: 2025-05-30 00:52:49.222558
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'e4fb49a4fe86'
down_revision = 'c459994abfa8'
branch_labels = None
depends_on = None
def upgrade():
    """Widen datasource_providers.plugin_id from UUID to TEXT."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.alter_column('plugin_id',
                              existing_type=sa.UUID(),
                              type_=sa.TEXT(),
                              existing_nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Narrow datasource_providers.plugin_id back to UUID.

    NOTE(review): the cast fails if any stored value is not a valid UUID.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.alter_column('plugin_id',
                              existing_type=sa.TEXT(),
                              type_=sa.UUID(),
                              existing_nullable=False)
    # ### end Alembic commands ###

View File

@ -1,35 +0,0 @@
"""add_pipeline_info_5
Revision ID: d466c551816f
Revises: e4fb49a4fe86
Create Date: 2025-06-05 13:56:05.962215
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd466c551816f'
down_revision = 'e4fb49a4fe86'
branch_labels = None
depends_on = None
def upgrade():
    """Replace the (plugin_id, provider) unique constraint with a non-unique lookup index."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('datasource_provider_plugin_id_provider_idx'), type_='unique')
        batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): drop the lookup index and restore the unique constraint."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_index('datasource_provider_auth_type_provider_idx')
        batch_op.create_unique_constraint(batch_op.f('datasource_provider_plugin_id_provider_idx'), ['plugin_id', 'provider'])
    # ### end Alembic commands ###

View File

@ -1,43 +0,0 @@
"""add_pipeline_info_6
Revision ID: 224fba149d48
Revises: d466c551816f
Create Date: 2025-06-11 11:55:01.179201
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '224fba149d48'
down_revision = 'd466c551816f'
branch_labels = None
depends_on = None
def upgrade():
    """Add created_by/updated_by audit columns to both pipeline template tables.

    NOTE(review): created_by is added NOT NULL without a default — this fails
    if the tables already contain rows.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), nullable=False))
        batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), nullable=True))
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), nullable=False))
        batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the audit columns added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.drop_column('updated_by')
        batch_op.drop_column('created_by')
    with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
        batch_op.drop_column('updated_by')
        batch_op.drop_column('created_by')
    # ### end Alembic commands ###

View File

@ -1,45 +0,0 @@
"""add_pipeline_info_7
Revision ID: 70a0fc0c013f
Revises: 224fba149d48
Create Date: 2025-06-17 19:05:39.920953
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '70a0fc0c013f'
down_revision = '224fba149d48'
branch_labels = None
depends_on = None
def upgrade():
    """Create document_pipeline_execution_logs plus a document_id lookup index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('document_pipeline_execution_logs',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
        sa.Column('document_id', models.types.StringUUID(), nullable=False),
        sa.Column('datasource_type', sa.String(length=255), nullable=False),
        sa.Column('datasource_info', sa.Text(), nullable=False),
        sa.Column('input_data', sa.JSON(), nullable=False),
        sa.Column('created_by', models.types.StringUUID(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
    )
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop the execution-log index and table created in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.drop_index('document_pipeline_execution_logs_document_id_idx')
    op.drop_table('document_pipeline_execution_logs')
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""add_pipeline_info_8
Revision ID: a1025f709c06
Revises: 70a0fc0c013f
Create Date: 2025-06-19 15:25:41.263120
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a1025f709c06'
down_revision = '70a0fc0c013f'
branch_labels = None
depends_on = None
def upgrade():
    """Add datasource_node_id to document_pipeline_execution_logs.

    NOTE(review): added NOT NULL without a default — fails on non-empty tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('datasource_node_id', sa.String(length=255), nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Drop the datasource_node_id column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.drop_column('datasource_node_id')
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""add_pipeline_info_9
Revision ID: 15e40b74a6d2
Revises: a1025f709c06
Create Date: 2025-07-02 11:32:44.125790
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '15e40b74a6d2'
down_revision = 'a1025f709c06'
branch_labels = None
depends_on = None
def upgrade():
    """Add a display name column to datasource_providers.

    NOTE(review): added NOT NULL without a default — fails on non-empty tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('name', sa.String(length=255), nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Drop the name column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_column('name')
    # ### end Alembic commands ###

View File

@ -1,50 +0,0 @@
"""datasource_oauth_1
Revision ID: d4a76fde2724
Revises: bb3812d469dd
Create Date: 2025-07-18 14:09:42.551752
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd4a76fde2724'
down_revision = '15e40b74a6d2'
branch_labels = None
depends_on = None
def upgrade():
    """Normalize plugin_id columns to String(255) on both datasource tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # datasource_oauth_params.plugin_id was still UUID; datasource_providers
    # had already been widened to TEXT in e4fb49a4fe86.
    with op.batch_alter_table('datasource_oauth_params', schema=None) as batch_op:
        batch_op.alter_column('plugin_id',
                              existing_type=sa.UUID(),
                              type_=sa.String(length=255),
                              existing_nullable=False)
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.alter_column('plugin_id',
                              existing_type=sa.TEXT(),
                              type_=sa.String(length=255),
                              existing_nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): restore the previous plugin_id column types."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.alter_column('plugin_id',
                              existing_type=sa.String(length=255),
                              type_=sa.TEXT(),
                              existing_nullable=False)
    # NOTE(review): the String -> UUID cast fails for non-UUID values.
    with op.batch_alter_table('datasource_oauth_params', schema=None) as batch_op:
        batch_op.alter_column('plugin_id',
                              existing_type=sa.String(length=255),
                              type_=sa.UUID(),
                              existing_nullable=False)
    # ### end Alembic commands ###

View File

@ -1,39 +0,0 @@
"""datasource_oauth_2
Revision ID: 1bce102aef9a
Revises: d4a76fde2724
Create Date: 2025-07-18 14:46:11.392061
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '1bce102aef9a'
down_revision = 'd4a76fde2724'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the obsolete tool_builtin_datasource_providers table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('tool_builtin_datasource_providers')
    # ### end Alembic commands ###
def downgrade():
    """Recreate tool_builtin_datasource_providers with its original definition (data is not restored)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tool_builtin_datasource_providers',
        sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
        sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=True),
        sa.Column('user_id', sa.UUID(), autoincrement=False, nullable=False),
        sa.Column('provider', sa.VARCHAR(length=256), autoincrement=False, nullable=False),
        sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
        sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('tool_builtin_datasource_provider_pkey')),
        sa.UniqueConstraint('tenant_id', 'provider', name=op.f('unique_builtin_datasource_provider'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""add_pipeline_info_12
Revision ID: 2008609cf2bb
Revises: 1bce102aef9a
Create Date: 2025-07-18 18:25:21.280897
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2008609cf2bb'
down_revision = '1bce102aef9a'
branch_labels = None
depends_on = None
def upgrade():
    """Add an optional avatar_url column to datasource_providers."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('avatar_url', sa.String(length=255), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the avatar_url column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_column('avatar_url')
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""add_pipeline_info_13
Revision ID: fcb46171d891
Revises: 2008609cf2bb
Create Date: 2025-07-18 21:34:31.914500
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fcb46171d891'
down_revision = '2008609cf2bb'
branch_labels = None
depends_on = None
def upgrade():
    """Enforce unique provider names per (tenant, plugin, provider)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.create_unique_constraint('datasource_provider_unique_name', ['tenant_id', 'plugin_id', 'provider', 'name'])
    # ### end Alembic commands ###
def downgrade():
    """Drop the unique-name constraint added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_constraint('datasource_provider_unique_name', type_='unique')
    # ### end Alembic commands ###

View File

@ -1,40 +0,0 @@
"""add_pipeline_info_14
Revision ID: d3c68680d3ba
Revises: fcb46171d891
Create Date: 2025-07-21 12:20:29.582951
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd3c68680d3ba'
down_revision = 'fcb46171d891'
branch_labels = None
depends_on = None
def upgrade():
    """Create datasource_oauth_tenant_params for tenant-supplied OAuth client settings."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('datasource_oauth_tenant_params',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
        sa.Column('provider', sa.String(length=255), nullable=False),
        sa.Column('plugin_id', sa.String(length=255), nullable=False),
        sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
        sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the datasource_oauth_tenant_params table created in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('datasource_oauth_tenant_params')
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""add_pipeline_info_15
Revision ID: 74e5f667f4b7
Revises: d3c68680d3ba
Create Date: 2025-07-21 15:23:20.825747
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '74e5f667f4b7'
down_revision = 'd3c68680d3ba'
branch_labels = None
depends_on = None
def upgrade():
    """Add is_default flag (server default false) to datasource_providers."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Drop the is_default column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_column('is_default')
    # ### end Alembic commands ###

View File

@ -1,33 +0,0 @@
"""datasource_oauth_refresh
Revision ID: 17d4db47800c
Revises: 74e5f667f4b7
Create Date: 2025-08-11 11:38:03.662874
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '17d4db47800c'
down_revision = '74e5f667f4b7'
branch_labels = None
depends_on = None
def upgrade():
    """Add expires_at to datasource_providers; -1 marks credentials that never expire (default)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.add_column(sa.Column('expires_at', sa.Integer(), nullable=False, server_default='-1'))
    # ### end Alembic commands ###
def downgrade():
    """Drop the expires_at column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_column('expires_at')
    # ### end Alembic commands ###

View File

@ -1,95 +0,0 @@
"""add WorkflowDraftVariableFile
Revision ID: 76db8b6ed8f1
Revises: 8c5088481127
Create Date: 2025-08-12 16:01:40.318520
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "76db8b6ed8f1"
down_revision = "8c5088481127"
branch_labels = None
depends_on = None
def upgrade():
    """Create workflow_draft_variable_files and link draft variables to offloaded storage."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "workflow_draft_variable_files",
        sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False),
        sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
        sa.Column(
            "tenant_id",
            models.types.StringUUID(),
            nullable=False,
            comment="The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id",
        ),
        sa.Column(
            "app_id",
            models.types.StringUUID(),
            nullable=False,
            comment="The application to which the WorkflowDraftVariableFile belongs, referencing App.id",
        ),
        sa.Column(
            "user_id",
            models.types.StringUUID(),
            nullable=False,
            comment="The owner to of the WorkflowDraftVariableFile, referencing Account.id",
        ),
        sa.Column(
            "upload_file_id",
            models.types.StringUUID(),
            nullable=False,
            comment="Reference to UploadFile containing the large variable data",
        ),
        sa.Column("size", sa.BigInteger(), nullable=False, comment="Size of the original variable content in bytes"),
        sa.Column(
            "length",
            sa.Integer(),
            nullable=True,
            comment=(
                "Length of the original variable content. For array and array-like types, this represents the "
                "number of elements. For object types, it indicates the number of keys. For other types, "
                "the value is NULL."
            )
        ),
        sa.Column("value_type", sa.String(20), nullable=False),
        sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variable_files_pkey")),
    )
    # Link columns on workflow_draft_variables; nullable/defaulted so existing
    # rows stay valid.
    with op.batch_alter_table("workflow_draft_variables", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "file_id",
                models.types.StringUUID(),
                nullable=True,
                comment="Reference to WorkflowDraftVariableFile if variable is offloaded to external storage",
            )
        )
        batch_op.add_column(
            sa.Column(
                "is_default_value",
                sa.Boolean,
                nullable=False,
                # NOTE(review): sa.text(text="FALSE") works but the keyword is
                # unconventional; sa.text("FALSE") is the usual spelling.
                server_default=sa.text(text="FALSE"),
                comment="Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.",
            )
        )
        batch_op.create_index("workflow_draft_variable_file_id_idx", ["file_id"], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): remove the offload link columns and drop the file table.

    The auto-generated body dropped only ``file_id``, leaving the
    ``is_default_value`` column behind and relying on the implicit removal of
    ``workflow_draft_variable_file_id_idx`` when its column is dropped. Drop
    the index and both columns explicitly so the schema returns exactly to its
    pre-upgrade state.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("workflow_draft_variables", schema=None) as batch_op:
        batch_op.drop_index("workflow_draft_variable_file_id_idx")
        batch_op.drop_column("is_default_value")
        batch_op.drop_column("file_id")
    op.drop_table("workflow_draft_variable_files")
    # ### end Alembic commands ###

View File

@ -1,72 +0,0 @@
"""add WorkflowNodeExecutionOffload
Revision ID: b45e25c2d166
Revises: 76db8b6ed8f1
Create Date: 2025-08-21 15:59:00.329004
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "b45e25c2d166"
down_revision = "76db8b6ed8f1"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create workflow_node_execution_offload linking node executions to stored files."""
    op.create_table(
        "workflow_node_execution_offload",
        sa.Column(
            "id",
            models.types.StringUUID(),
            server_default=sa.text("uuidv7()"),
            nullable=False,
        ),
        sa.Column(
            "created_at",
            sa.DateTime(),
            server_default=sa.text("CURRENT_TIMESTAMP"),
            nullable=False,
        ),
        sa.Column(
            "tenant_id",
            models.types.StringUUID(),
            nullable=False,
        ),
        sa.Column(
            "app_id",
            models.types.StringUUID(),
            nullable=False,
        ),
        # Nullable: an offload row may exist before/without its execution row.
        # NOTE(review): assumption — confirm against the ORM model.
        sa.Column(
            "node_execution_id",
            models.types.StringUUID(),
            nullable=True,
        ),
        sa.Column(
            "type",
            sa.String(20),
            nullable=False,
        ),
        sa.Column(
            "file_id",
            models.types.StringUUID(),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("workflow_node_execution_offload_pkey")),
        # At most one offload row per (execution, type).
        sa.UniqueConstraint(
            "node_execution_id",
            "type",
            name=op.f("workflow_node_execution_offload_node_execution_id_key"),
            postgresql_nulls_not_distinct=False,
        ),
    )
def downgrade():
    """Drop the workflow_node_execution_offload table created in upgrade()."""
    op.drop_table("workflow_node_execution_offload")

View File

@ -1,38 +0,0 @@
"""add_pipeline_info_17
Revision ID: 8c5088481127
Revises: 17d4db47800c
Create Date: 2025-09-01 14:43:48.417869
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8c5088481127'
down_revision = '17d4db47800c'
branch_labels = None
depends_on = None
def upgrade():
    """Create pipeline_recommended_plugins for curated, ordered plugin recommendations."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('pipeline_recommended_plugins',
        sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
        sa.Column('plugin_id', sa.Text(), nullable=False),
        sa.Column('provider_name', sa.Text(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the pipeline_recommended_plugins table created in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('pipeline_recommended_plugins')
    # ### end Alembic commands ###

View File

@ -1,35 +0,0 @@
"""add_pipeline_info_18
Revision ID: 0b2ca375fabe
Revises: b45e25c2d166
Create Date: 2025-09-12 14:29:38.078589
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0b2ca375fabe'
down_revision = 'b45e25c2d166'
branch_labels = None
depends_on = None
def upgrade():
    """Add enable_api flag to datasets; defaults to true so existing datasets stay API-accessible."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Drop the enable_api column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.drop_column('enable_api')
    # ### end Alembic commands ###

View File

@ -0,0 +1,222 @@
"""knowledge_pipeline_migrate
Revision ID: 68519ad5cd18
Revises: cf7c38a32b2d
Create Date: 2025-09-17 15:15:50.697885
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '68519ad5cd18'
down_revision = 'cf7c38a32b2d'
branch_labels = None
depends_on = None
def upgrade():
    """Create the knowledge-pipeline tables and extend existing tables.

    New tables: datasource OAuth config (system- and tenant-scoped),
    datasource provider credentials, document pipeline execution logs,
    pipeline templates (built-in and per-tenant customized), recommended
    plugins, pipelines, and workflow draft-variable offloading tables.

    Altered tables: datasets (pipeline/icon/API columns),
    workflow_draft_variables (offload file reference + default-value flag),
    workflows (rag_pipeline_variables).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # System-wide OAuth client parameters, one row per datasource plugin/provider pair.
    op.create_table('datasource_oauth_params',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('plugin_id', sa.String(length=255), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
    sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
    )
    # Tenant-scoped OAuth client overrides for datasource providers.
    op.create_table('datasource_oauth_tenant_params',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('plugin_id', sa.String(length=255), nullable=False),
    sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('enabled', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
    sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
    )
    # Per-tenant datasource provider credential records; expires_at = -1 means no expiry.
    op.create_table('datasource_providers',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('plugin_id', sa.String(length=255), nullable=False),
    sa.Column('auth_type', sa.String(length=255), nullable=False),
    sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('avatar_url', sa.String(length=255), nullable=True),
    sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
    sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
    )
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        # NOTE(review): the index name mentions auth_type but the indexed columns are
        # (tenant_id, plugin_id, provider); renaming would require a matching change
        # to the drop_index call in downgrade(), so the name is kept as generated.
        batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)
    # Audit log of pipeline runs that produced documents.
    op.create_table('document_pipeline_execution_logs',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
    sa.Column('document_id', models.types.StringUUID(), nullable=False),
    sa.Column('datasource_type', sa.String(length=255), nullable=False),
    sa.Column('datasource_info', sa.Text(), nullable=False),
    sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
    sa.Column('input_data', sa.JSON(), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
    )
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)
    # Built-in (system-provided) pipeline templates, stored as YAML.
    op.create_table('pipeline_built_in_templates',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('chunk_structure', sa.String(length=255), nullable=False),
    sa.Column('icon', sa.JSON(), nullable=False),
    sa.Column('yaml_content', sa.Text(), nullable=False),
    sa.Column('copyright', sa.String(length=255), nullable=False),
    sa.Column('privacy_policy', sa.String(length=255), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.Column('install_count', sa.Integer(), nullable=False),
    sa.Column('language', sa.String(length=255), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
    )
    # Tenant-authored pipeline templates.
    op.create_table('pipeline_customized_templates',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('chunk_structure', sa.String(length=255), nullable=False),
    sa.Column('icon', sa.JSON(), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.Column('yaml_content', sa.Text(), nullable=False),
    sa.Column('install_count', sa.Integer(), nullable=False),
    sa.Column('language', sa.String(length=255), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
    )
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)
    # Curated list of plugins recommended for pipelines.
    op.create_table('pipeline_recommended_plugins',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('plugin_id', sa.Text(), nullable=False),
    sa.Column('provider_name', sa.Text(), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.Column('active', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
    )
    # Pipeline instances; workflow_id links to the backing workflow definition.
    op.create_table('pipelines',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
    sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
    sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=True),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_by', models.types.StringUUID(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
    )
    # Metadata for draft-variable values offloaded to external file storage.
    op.create_table('workflow_draft_variable_files',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
    sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
    sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner to of the WorkflowDraftVariableFile, referencing Account.id'),
    sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
    sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
    sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
    sa.Column('value_type', sa.String(20), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
    )
    # Per-node-execution offload records; at most one row per (node_execution_id, type).
    op.create_table('workflow_node_execution_offload',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
    sa.Column('type', sa.String(20), nullable=False),
    sa.Column('file_id', models.types.StringUUID(), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
    sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'), postgresql_nulls_not_distinct=False)
    )
    # New dataset columns; all nullable or server-defaulted so existing rows stay valid.
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
        batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
        batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
        batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
        batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
        batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
    with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
        batch_op.add_column(sa.Column('file_id', models.types.StringUUID(), nullable=True, comment='Reference to WorkflowDraftVariableFile if variable is offloaded to external storage'))
        # Normalized to match the file's sa.text('...') convention; emits the same
        # DEFAULT FALSE clause as the original sa.text(text="FALSE") form.
        batch_op.add_column(sa.Column('is_default_value', sa.Boolean(), server_default=sa.text('FALSE'), nullable=False, comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.'))
        batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False)
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Revert knowledge_pipeline_migrate.

    Drops the added columns first (reverse order of upgrade), then the new
    tables, removing each table's indexes before dropping the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflows', schema=None) as batch_op:
        batch_op.drop_column('rag_pipeline_variables')
    with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
        batch_op.drop_index('workflow_draft_variable_file_id_idx')
        batch_op.drop_column('is_default_value')
        batch_op.drop_column('file_id')
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.drop_column('enable_api')
        batch_op.drop_column('chunk_structure')
        batch_op.drop_column('pipeline_id')
        batch_op.drop_column('runtime_mode')
        batch_op.drop_column('icon_info')
        batch_op.drop_column('keyword_number')
    # Drop the tables created by upgrade(), newest-dependency-first.
    op.drop_table('workflow_node_execution_offload')
    op.drop_table('workflow_draft_variable_files')
    op.drop_table('pipelines')
    op.drop_table('pipeline_recommended_plugins')
    with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
        batch_op.drop_index('pipeline_customized_template_tenant_idx')
    op.drop_table('pipeline_customized_templates')
    op.drop_table('pipeline_built_in_templates')
    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
        batch_op.drop_index('document_pipeline_execution_logs_document_id_idx')
    op.drop_table('document_pipeline_execution_logs')
    with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
        batch_op.drop_index('datasource_provider_auth_type_provider_idx')
    op.drop_table('datasource_providers')
    op.drop_table('datasource_oauth_tenant_params')
    op.drop_table('datasource_oauth_params')
    # ### end Alembic commands ###