diff --git a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py
index c6f8818852..742cfc345a 100644
--- a/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py
+++ b/api/migrations/versions/2025_09_17_1515-68519ad5cd18_knowledge_pipeline_migrate.py
@@ -156,7 +156,7 @@ def upgrade():
     sa.Column('type', sa.String(20), nullable=False),
     sa.Column('file_id', models.types.StringUUID(), nullable=False),
     sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
-    sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'), postgresql_nulls_not_distinct=False)
+    sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
     )
     with op.batch_alter_table('datasets', schema=None) as batch_op:
         batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
diff --git a/api/models/workflow.py b/api/models/workflow.py
index 5f604a51a8..e61005953e 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -890,12 +890,18 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo
 class WorkflowNodeExecutionOffload(Base):
     __tablename__ = "workflow_node_execution_offload"
     __table_args__ = (
+        # PostgreSQL 14 treats NULL values as distinct in unique constraints by default,
+        # allowing multiple records with NULL values for the same column combination.
+        #
+        # This behavior lets us keep multiple records with a NULL node_execution_id,
+        # simplifying the garbage collection process.
         UniqueConstraint(
             "node_execution_id",
             "type",
-            # Treat `NULL` as distinct for this unique index, so
-            # we can have mutitple records with `NULL` node_exeution_id, simplify garbage collection process.
-            postgresql_nulls_not_distinct=False,
+            # Note: PostgreSQL 15+ supports stating the `nulls distinct` behavior explicitly via
+            # `postgresql_nulls_not_distinct=False`, which would make our intention clearer.
+            # We rely on PostgreSQL's default behavior of treating NULLs as distinct values.
+            # postgresql_nulls_not_distinct=False,
         ),
     )
     _HASH_COL_SIZE = 64
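
For context, a minimal sketch (not part of the patch) of the behavior the new comments describe: under the default `nulls distinct` semantics, a composite unique constraint still admits any number of rows whose `node_execution_id` is NULL. The table and column names below are simplified from the migration, and the sketch uses an in-memory SQLite database purely so it runs standalone; SQLite's unique constraints also treat NULLs as distinct, matching PostgreSQL's default.

# Illustrative sketch only; simplified table/column names, in-memory SQLite.
import uuid

from sqlalchemy import Column, MetaData, String, Table, UniqueConstraint, create_engine, insert

metadata = MetaData()

offload_demo = Table(
    "offload_demo",
    metadata,
    Column("id", String(36), primary_key=True),
    Column("node_execution_id", String(36), nullable=True),
    Column("type", String(20), nullable=False),
    # No postgresql_nulls_not_distinct flag: rely on the default, where each
    # NULL is considered distinct from every other NULL.
    UniqueConstraint("node_execution_id", "type"),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

with engine.begin() as conn:
    # Both rows have the same type and a NULL node_execution_id, yet neither
    # violates the unique constraint; this is what lets detached offload rows
    # accumulate until garbage collection removes them.
    conn.execute(
        insert(offload_demo),
        [
            {"id": str(uuid.uuid4()), "node_execution_id": None, "type": "inputs"},
            {"id": str(uuid.uuid4()), "node_execution_id": None, "type": "inputs"},
        ],
    )
    # Reusing a non-NULL node_execution_id with the same type would instead
    # raise an IntegrityError.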