Use SQLAlchemy expressions for is_deleted migration backup

This commit is contained in:
-LAN- 2025-11-27 18:04:48 +08:00
parent 26c2ad3d2b
commit 53c6c27a98
No known key found for this signature in database
GPG Key ID: 6BA0D108DED011FF
1 changed file with 43 additions and 28 deletions

View File

@@ -4,8 +4,8 @@ Revision ID: 4f02b6704509
Revises: 7bb281b7a422
Create Date: 2025-09-02 20:12:37.311318
-This migration runs on both PostgreSQL and MySQL. Avoid hardcoding boolean
-literals so the SQL renders correctly for each dialect.
+This migration runs on both PostgreSQL and MySQL. It uses SQLAlchemy
+expressions instead of raw SQL so dialects handle boolean literals.
"""
from alembic import op
import models as models
@@ -25,19 +25,35 @@ backup_table_name = 'conversations_4f02b6704509_bak'
def upgrade():
    """Drop the ``is_deleted`` column while preserving soft-deleted rows.

    Soft-deleted conversations are copied into a backup table
    (``conversations_4f02b6704509_bak``) before the column is dropped, so
    ``downgrade()`` can restore them.  Uses SQLAlchemy expressions rather
    than raw SQL so boolean literals render correctly on both PostgreSQL
    and MySQL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    context = op.get_context()
    if context.is_offline_mode():
        # In offline mode we cannot reflect columns; skip backup/data moves.
        with op.batch_alter_table('conversations', schema=None) as batch_op:
            batch_op.drop_column('is_deleted')
        return
    bind = op.get_bind()
    metadata = sa.MetaData()
    conversations = sa.Table('conversations', metadata, autoload_with=bind)
    # Drop a leftover backup table if one exists (keeps reruns idempotent).
    inspector = inspect(bind)
    if backup_table_name in inspector.get_table_names():
        op.drop_table(backup_table_name)
    # Create the backup table with a schema identical to `conversations`.
    # NOTE(review): Column.copy() is deprecated in SQLAlchemy 1.4+ — confirm
    # the pinned SQLAlchemy version still provides it.
    op.create_table(backup_table_name, *[col.copy() for col in conversations.columns])
    backup_table = sa.Table(backup_table_name, metadata, autoload_with=bind)
    # Copy soft-deleted rows into the backup (works even when zero rows match).
    insert_backup = sa.insert(backup_table).from_select(
        conversations.columns.keys(),
        sa.select(*conversations.c).where(conversations.c.is_deleted.is_(sa.true())),
    )
    bind.execute(insert_backup)
    # Remove the soft-deleted rows from the main table.
    bind.execute(sa.delete(conversations).where(conversations.c.is_deleted.is_(sa.true())))
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.drop_column('is_deleted')
def downgrade():
    """Re-add ``is_deleted`` and restore rows from the backup table."""
    with op.batch_alter_table('conversations', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_deleted', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False))
    # Restore soft-deleted conversations from the backup table if it exists.
    context = op.get_context()
    if context.is_offline_mode():
        # Offline migration generation; skip data restoration logic.
        return
    bind = op.get_bind()
    metadata = sa.MetaData()
    inspector = inspect(bind)
    if backup_table_name in inspector.get_table_names():
        conversations = sa.Table('conversations', metadata, autoload_with=bind)
        backup_table = sa.Table(backup_table_name, metadata, autoload_with=bind)
        # Restore the soft-deleted conversations.  Use the *backup* table's
        # column names: from_select() maps the name list to the SELECT's
        # columns positionally, and after add_column() the reflected
        # `conversations` order (is_deleted appended last) may not match the
        # backup table's original column order.
        restore_stmt = sa.insert(conversations).from_select(
            backup_table.columns.keys(),
            sa.select(*backup_table.c),
        )
        bind.execute(restore_stmt)
        # Drop the backup table only after a successful restoration.
        op.drop_table(backup_table_name)
    # ### end Alembic commands ###