refactor(openapi): co-locate per-mode run response models in _models.py

Prepares for the api-3 unified /run route which imports all three
response shapes from one location. The per-mode files (chat_messages,
completion_messages, workflow_run) still define their own inline copies
until Task 4 deletes them; there is no collision because neither
location is imported anywhere else.
This commit is contained in:
GareArc 2026-05-06 23:46:53 -07:00
parent 35c08f7c3d
commit 40ae39a3a3
No known key found for this signature in database
2 changed files with 60 additions and 1 deletions

View File

@ -4,7 +4,7 @@ from __future__ import annotations
from typing import Any
from pydantic import BaseModel
from pydantic import BaseModel, Field
# Server-side cap on `limit` query param for any /openapi/v1/* list endpoint.
# Sibling endpoints (`/apps`, `/account/sessions`, future routes) all clamp to
@ -67,3 +67,46 @@ class AppDescribeResponse(BaseModel):
info: AppDescribeInfo | None = None
parameters: dict[str, Any] | None = None
input_schema: dict[str, Any] | None = None
class ChatMessageResponse(BaseModel):
    """Blocking-mode response body for a chat-app run.

    NOTE(review): the exact value sets for `event` and `mode` are not
    visible in this file — confirm against the upstream API before
    documenting them as enums.
    """

    event: str  # event name, e.g. "message" in the sibling test — TODO confirm full set
    task_id: str
    id: str
    message_id: str  # presumably equal to `id` upstream; the test passes the same value for both
    conversation_id: str
    mode: str  # echoed app mode (the test dumps mode == "chat")
    answer: str
    # Defaults to an empty MessageMetadata when the upstream payload omits it.
    metadata: MessageMetadata = Field(default_factory=MessageMetadata)
    created_at: int  # unix timestamp — assumed seconds; verify against producer
class CompletionMessageResponse(BaseModel):
    """Blocking-mode response body for a completion-app run.

    Identical to ChatMessageResponse except it carries no
    `conversation_id` (completion apps are single-shot).
    """

    event: str  # event name — NOTE(review): full event vocabulary not visible here
    task_id: str
    id: str
    message_id: str
    mode: str  # echoed app mode
    answer: str
    # Defaults to an empty MessageMetadata when the upstream payload omits it.
    metadata: MessageMetadata = Field(default_factory=MessageMetadata)
    created_at: int  # unix timestamp — assumed seconds; verify against producer
class WorkflowRunData(BaseModel):
    """Inner `data` object of a workflow run response.

    Only `id`, `workflow_id`, and `status` are required; every other
    field is optional and defaults to None (or an empty dict for
    `outputs`).
    """

    id: str
    workflow_id: str
    status: str  # e.g. "succeeded" in the sibling test — full status set not visible here
    # Use Field(default_factory=dict) instead of a bare `= {}` mutable
    # default, matching the Field(default_factory=...) convention of the
    # sibling response models. (Pydantic deep-copies plain mutable
    # defaults, so behavior is unchanged — this is the explicit form.)
    outputs: dict[str, Any] = Field(default_factory=dict)
    error: str | None = None
    elapsed_time: float | None = None
    total_tokens: int | None = None
    total_steps: int | None = None
    created_at: int | None = None  # unix timestamp — assumed seconds; verify against producer
    finished_at: int | None = None
class WorkflowRunResponse(BaseModel):
    """Top-level response body for a workflow-app run, wrapping WorkflowRunData."""

    workflow_run_id: str
    task_id: str
    # Echoed for CLI per-mode rendering — see endpoints.md L154.
    mode: str = "workflow"
    data: WorkflowRunData

View File

@ -100,3 +100,19 @@ def test_app_describe_response_nests_info_and_parameters():
dumped = obj.model_dump(mode="json")
assert dumped["info"]["service_api_enabled"] is True
assert dumped["parameters"]["opening_statement"] is None
def test_response_models_dump_per_mode():
    """Per-mode response models construct and JSON-dump their key fields."""
    # CompletionMessageResponse is imported but never instantiated here:
    # the import itself fails fast if the model is removed from _models.
    from controllers.openapi._models import (
        ChatMessageResponse,
        CompletionMessageResponse,  # noqa: F401 — existence check only
        WorkflowRunData,
        WorkflowRunResponse,
    )

    chat_resp = ChatMessageResponse(
        event="message",
        task_id="t1",
        id="m1",
        message_id="m1",
        conversation_id="c1",
        mode="chat",
        answer="hi",
        created_at=0,
    )
    assert chat_resp.model_dump(mode="json")["mode"] == "chat"

    run_data = WorkflowRunData(id="r1", workflow_id="w1", status="succeeded")
    workflow_resp = WorkflowRunResponse(
        workflow_run_id="r1",
        task_id="t1",
        data=run_data,
    )
    assert workflow_resp.model_dump(mode="json")["data"]["status"] == "succeeded"