impl a runnable version of the dify_agent server

This commit is contained in:
盐粒 Yanli 2026-05-08 01:31:30 +08:00
parent 8c6e9c3b95
commit 5a425f1525
18 changed files with 1245 additions and 21 deletions

View File

@ -0,0 +1,59 @@
"""Example consumer for the Dify Agent run server.
Requires Redis and a running API server. The server starts its Redis Streams
worker in the same process by default, for example:
uv run --project dify-agent uvicorn dify_agent.server.app:app --reload
The default request uses the credential-free pydantic-ai TestModel profile. This
script prints the created run and every event observed through cursor polling.
"""
import asyncio
import httpx
API_BASE_URL = "http://localhost:8000"
async def main() -> None:
    """Create one example run and poll its events until a terminal event arrives."""
    async with httpx.AsyncClient(base_url=API_BASE_URL, timeout=30) as client:
        created = await client.post(
            "/runs",
            json={
                "compositor": {
                    "schema_version": 1,
                    "layers": [
                        {
                            "name": "prompt",
                            "type": "plain.prompt",
                            "config": {
                                "prefix": "You are a concise assistant.",
                                "user": "Say hello from the Dify Agent API server example.",
                            },
                        }
                    ],
                },
                "agent_profile": {"provider": "test", "output_text": "Hello from the example TestModel."},
            },
        )
        created.raise_for_status()
        run = created.json()
        print("created run", run)

        # Cursor-based polling: "0-0" replays the event log from the beginning.
        cursor = "0-0"
        terminal_types = {"run_succeeded", "run_failed"}
        while True:
            page_response = await client.get(f"/runs/{run['run_id']}/events", params={"after": cursor})
            page_response.raise_for_status()
            page = page_response.json()
            cursor = page["next_cursor"] or cursor
            for event in page["events"]:
                print("event", event)
                if event["type"] in terminal_types:
                    return
            await asyncio.sleep(0.5)
if __name__ == "__main__":
asyncio.run(main())

View File

@ -0,0 +1,26 @@
"""SSE consumer sketch for the Dify Agent run server.
Create a run with ``run_server_consumer.py`` or any HTTP client, then set RUN_ID
below and run this script while the server is available. It prints raw SSE frames
without requiring model credentials.
"""
import asyncio
import httpx
API_BASE_URL = "http://localhost:8000"
RUN_ID = "replace-with-run-id"
async def main() -> None:
    """Connect to the run's SSE endpoint and print every raw frame line."""
    client = httpx.AsyncClient(base_url=API_BASE_URL, timeout=None)
    async with client:
        async with client.stream("GET", f"/runs/{RUN_ID}/events/sse") as response:
            response.raise_for_status()
            async for frame_line in response.aiter_lines():
                print(frame_line)
if __name__ == "__main__":
asyncio.run(main())

View File

@ -12,8 +12,10 @@ dependencies = [
"logfire>=4.32.1",
"pydantic>=2.13.3",
"pydantic-ai-slim[anthropic,google,openai]>=1.85.1",
"pydantic-settings>=2.12.0",
"redis>=5",
"sqlmodel>=0.0.38",
"unicorn>=2.1.4",
"uvicorn[standard]>=0.38.0",
"uvloop>=0.22.1",
]

View File

@ -0,0 +1,56 @@
"""Pydantic AI agent construction for runtime profiles.
The initial server exposes only a credential-free ``test`` profile. The factory
keeps model selection out of ``AgentRunRunner`` so production model profiles can
be added without changing storage or HTTP contracts.
"""
from collections.abc import Sequence
from typing import Callable, cast
from pydantic_ai import Agent
from pydantic_ai.messages import UserContent
from pydantic_ai.models.test import TestModel
from agenton.layers.types import PydanticAIPrompt, PydanticAITool
from dify_agent.server.schemas import AgentProfileConfig
def create_agent(
    profile: AgentProfileConfig,
    *,
    system_prompts: Sequence[PydanticAIPrompt[object]],
    tools: Sequence[PydanticAITool[object]],
) -> Agent[None, str]:
    """Create the pydantic-ai agent for one run."""
    # Guard clause: only the credential-free test profile is supported today.
    if profile.provider != "test":
        raise ValueError(f"Unsupported agent profile provider: {profile.provider}")
    model = TestModel(custom_output_text=profile.output_text)
    return Agent[None, str](
        model,
        output_type=str,
        system_prompt=materialize_static_system_prompts(system_prompts),
        tools=tools,
    )
def materialize_static_system_prompts(system_prompts: Sequence[PydanticAIPrompt[object]]) -> list[str]:
    """Convert MVP static prompt callables into strings for pydantic-ai."""
    rendered: list[str] = []
    for entry in system_prompts:
        # Plain strings pass through untouched.
        if isinstance(entry, str):
            rendered.append(entry)
            continue
        # Zero-argument callables are invoked once to produce their text.
        if not callable(entry):
            raise TypeError(f"Unsupported system prompt type: {type(entry).__qualname__}")
        rendered.append(cast(Callable[[], str], entry)())
    return rendered
def normalize_user_input(user_prompts: Sequence[UserContent]) -> str | Sequence[UserContent]:
"""Return the pydantic-ai run input while preserving multi-part prompts."""
if len(user_prompts) == 1 and isinstance(user_prompts[0], str):
return user_prompts[0]
return list(user_prompts)
__all__ = ["create_agent", "materialize_static_system_prompts", "normalize_user_input"]

View File

@ -0,0 +1,53 @@
"""Safe Agenton compositor construction for API-submitted configs.
Only explicitly registered layer types are constructible here. The MVP registry
contains ``PromptLayer`` so callers can provide system/user prompt fragments while
the runtime preserves hooks for richer profiles later.
"""
from typing import cast
from pydantic_ai.messages import UserContent
from agenton.compositor import Compositor, CompositorConfig, LayerRegistry
from agenton.layers.types import AllPromptTypes, AllToolTypes, AllUserPromptTypes, PydanticAIPrompt, PydanticAITool
from agenton_collections.layers.plain.basic import PromptLayer
from agenton_collections.transformers.pydantic_ai import PYDANTIC_AI_TRANSFORMERS
def create_default_layer_registry() -> LayerRegistry:
    """Return the server registry of safe config-constructible layers."""
    # Only PromptLayer is registered, so API configs cannot construct arbitrary layers.
    safe_layers = LayerRegistry()
    safe_layers.register_layer(PromptLayer)
    return safe_layers
def build_pydantic_ai_compositor(
    config: CompositorConfig,
) -> Compositor[
    PydanticAIPrompt[object],
    PydanticAITool[object],
    AllPromptTypes,
    AllToolTypes,
    UserContent,
    AllUserPromptTypes,
]:
    """Build a Pydantic AI-ready compositor from a validated config."""
    # Construct from the safe registry first, then narrow the generic
    # parameters for callers in a separate cast step.
    compositor = Compositor.from_config(
        config,
        registry=create_default_layer_registry(),
        **PYDANTIC_AI_TRANSFORMERS,  # pyright: ignore[reportArgumentType]
    )
    return cast(
        Compositor[
            PydanticAIPrompt[object],
            PydanticAITool[object],
            AllPromptTypes,
            AllToolTypes,
            UserContent,
            AllUserPromptTypes,
        ],
        compositor,
    )
__all__ = ["build_pydantic_ai_compositor", "create_default_layer_registry"]

View File

@ -0,0 +1,64 @@
"""Event sink contracts used by the runner and storage adapters.
The runner only needs append-only event writes and status transitions, so tests
can use ``InMemoryRunEventSink`` without Redis. Production storage implements the
same protocol with Redis streams in ``dify_agent.storage.redis_run_store``.
"""
from collections import defaultdict
from typing import Protocol
from pydantic import JsonValue
from dify_agent.server.schemas import RunEvent, RunEventType, RunStatus, utc_now
class RunEventSink(Protocol):
    """Boundary used by runtime code to publish observable run progress.

    ``InMemoryRunEventSink`` implements it for tests; ``RedisRunStore``
    subclasses it for production.
    """

    async def append_event(self, event: RunEvent) -> str:
        """Persist ``event`` and return its cursor id.

        The returned id is the public cursor used by polling and SSE replay.
        """
        ...

    async def update_status(self, run_id: str, status: RunStatus, error: str | None = None) -> None:
        """Persist the current run status.

        ``error`` carries the failure message; callers pass ``None`` on
        non-failure transitions.
        """
        ...
class InMemoryRunEventSink:
    """Small async-compatible sink for local unit tests and examples."""

    # Per-run append-only event lists, keyed by run id.
    events: dict[str, list[RunEvent]]
    # Latest observed status per run.
    statuses: dict[str, RunStatus]
    # Latest error message (or None) per run.
    errors: dict[str, str | None]

    def __init__(self) -> None:
        self.events = defaultdict(list)
        self.statuses = {}
        self.errors = {}

    async def append_event(self, event: RunEvent) -> str:
        """Store an event and assign a monotonic per-run cursor."""
        bucket = self.events[event.run_id]
        cursor = str(len(bucket) + 1)
        bucket.append(event.model_copy(update={"id": cursor}))
        return cursor

    async def update_status(self, run_id: str, status: RunStatus, error: str | None = None) -> None:
        """Record the latest status; timestamps are owned by run stores."""
        self.statuses[run_id] = status
        self.errors[run_id] = error
async def emit_run_event(
    sink: RunEventSink,
    *,
    run_id: str,
    type: RunEventType,
    data: JsonValue,
) -> str:
    """Create and append a timestamped ``RunEvent``."""
    # Timestamp is assigned here so all sinks see a consistent created_at.
    event = RunEvent(run_id=run_id, type=type, data=data, created_at=utc_now())
    return await sink.append_event(event)
__all__ = ["InMemoryRunEventSink", "RunEventSink", "emit_run_event"]

View File

@ -0,0 +1,98 @@
"""Runtime execution for one queued Dify Agent run.
The runner is storage-agnostic: it builds an Agenton compositor, enters or
resumes its session, runs pydantic-ai with ``compositor.user_prompts`` as the user
input, emits stream events, suspends the session on exit, snapshots it, and then
publishes a terminal success or failure event.
"""
from collections.abc import AsyncIterable
from typing import cast
from pydantic import JsonValue, TypeAdapter
from pydantic_ai.messages import AgentStreamEvent
from agenton.compositor import CompositorSessionSnapshot
from dify_agent.runtime.agent_factory import create_agent, normalize_user_input
from dify_agent.runtime.compositor_factory import build_pydantic_ai_compositor
from dify_agent.runtime.event_sink import RunEventSink, emit_run_event
from dify_agent.runtime.user_prompt_validation import EMPTY_USER_PROMPTS_ERROR, has_non_blank_user_prompt
from dify_agent.server.schemas import CreateRunRequest
_AGENT_STREAM_EVENT_ADAPTER = TypeAdapter(AgentStreamEvent)
class AgentRunValidationError(ValueError):
    """Raised when a run request is valid JSON but cannot execute.

    Subclasses ``ValueError`` so it flows through the runner's generic
    failure path (``run_failed`` event + "failed" status) like any other error.
    """
class AgentRunRunner:
    """Executes one run and writes only public run events to its sink."""

    # Event/status sink: in-memory for tests, Redis-backed in production.
    sink: RunEventSink
    # The validated request captured when the run was created.
    request: CreateRunRequest
    # External run id stamped on every emitted event and status update.
    run_id: str

    def __init__(self, *, sink: RunEventSink, request: CreateRunRequest, run_id: str) -> None:
        self.sink = sink
        self.request = request
        self.run_id = run_id

    async def run(self) -> None:
        """Execute the run and emit the documented event sequence.

        On success: status "running" + run_started, then agent_output,
        session_snapshot, run_succeeded, status "succeeded". On any
        exception: run_failed + status "failed", then the exception is
        re-raised for the caller (the worker) to observe.
        """
        await self.sink.update_status(self.run_id, "running")
        _ = await emit_run_event(self.sink, run_id=self.run_id, type="run_started", data={})
        try:
            output, session_snapshot = await self._run_agent()
        except Exception as exc:
            # Fall back to the exception class name when str(exc) is empty.
            message = str(exc) or type(exc).__name__
            _ = await emit_run_event(self.sink, run_id=self.run_id, type="run_failed", data={"error": message})
            await self.sink.update_status(self.run_id, "failed", message)
            raise
        _ = await emit_run_event(self.sink, run_id=self.run_id, type="agent_output", data={"output": output})
        # Snapshot is published as JSON so clients can resume the session later.
        _ = await emit_run_event(
            self.sink,
            run_id=self.run_id,
            type="session_snapshot",
            data=cast(JsonValue, session_snapshot.model_dump(mode="json")),
        )
        _ = await emit_run_event(self.sink, run_id=self.run_id, type="run_succeeded", data={})
        await self.sink.update_status(self.run_id, "succeeded")

    async def _run_agent(self) -> tuple[str, CompositorSessionSnapshot]:
        """Run pydantic-ai inside an entered Agenton session."""
        compositor = build_pydantic_ai_compositor(self.request.compositor)
        # Resume from a client-supplied snapshot when present, otherwise start fresh.
        session = (
            compositor.session_from_snapshot(self.request.session_snapshot)
            if self.request.session_snapshot is not None
            else compositor.new_session()
        )
        async with compositor.enter(session) as active_session:
            active_session.suspend_on_exit()
            user_prompts = compositor.user_prompts
            # Same check the API route performs at create time; repeated here so
            # worker-executed jobs are validated against the composed prompts too.
            if not has_non_blank_user_prompt(user_prompts):
                raise AgentRunValidationError(EMPTY_USER_PROMPTS_ERROR)

            async def handle_events(_ctx: object, events: AsyncIterable[AgentStreamEvent]) -> None:
                # Forward each raw pydantic-ai stream event as a public run event.
                async for event in events:
                    _ = await emit_run_event(
                        self.sink,
                        run_id=self.run_id,
                        type="pydantic_ai_event",
                        data=cast(JsonValue, _AGENT_STREAM_EVENT_ADAPTER.dump_python(event, mode="json")),
                    )

            agent = create_agent(
                self.request.agent_profile,
                system_prompts=compositor.prompts,
                tools=compositor.tools,
            )
            result = await agent.run(normalize_user_input(user_prompts), event_stream_handler=handle_events)
            # Snapshot while the session is still entered so its state is captured.
            return result.output, compositor.snapshot_session(session)
__all__ = ["AgentRunRunner", "AgentRunValidationError"]

View File

@ -0,0 +1,29 @@
"""Validation for effective user prompts produced by Agenton compositors.
Validation happens after safe compositor construction so API and worker paths use
the same semantics as the actual pydantic-ai input. Blank string fragments do not
count as meaningful input; non-string ``UserContent`` is treated as intentional
content because rich media/message parts do not have a universal whitespace
representation.
"""
from collections.abc import Sequence
from pydantic_ai.messages import UserContent
EMPTY_USER_PROMPTS_ERROR = "compositor.user_prompts must not be empty"
def has_non_blank_user_prompt(user_prompts: Sequence[UserContent]) -> bool:
    """Return whether composed user prompts contain meaningful input."""
    # Non-string content counts as meaningful; strings must survive strip().
    return any(
        not isinstance(fragment, str) or bool(fragment.strip())
        for fragment in user_prompts
    )
__all__ = ["EMPTY_USER_PROMPTS_ERROR", "has_non_blank_user_prompt"]

View File

@ -0,0 +1,78 @@
"""FastAPI application factory for the Dify Agent run server.
The HTTP process owns Redis clients, route wiring, and by default one embedded
Redis Streams worker task. Run execution still happens outside request handlers,
so client latency and disconnects do not control the agent runtime, but local
development only needs one ``uvicorn`` process plus Redis.
"""
import asyncio
import os
import socket
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager, suppress
from fastapi import FastAPI
from redis.asyncio import Redis
from dify_agent.server.routes.runs import create_runs_router
from dify_agent.server.settings import ServerSettings
from dify_agent.storage.redis_run_store import RedisRunStore
from dify_agent.worker.job_worker import RunJobWorker
def create_app(settings: ServerSettings | None = None) -> FastAPI:
    """Build the FastAPI app with one shared Redis-backed run store and worker."""
    resolved_settings = settings or ServerSettings()
    # Mutable closure cell: lifespan fills in the store after Redis connects,
    # and route dependencies read it back through get_store().
    state: dict[str, RedisRunStore] = {}

    @asynccontextmanager
    async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]:
        # One Redis client and one store per app lifetime.
        redis = Redis.from_url(resolved_settings.redis_url)
        store = RedisRunStore(redis, prefix=resolved_settings.redis_prefix)
        state["store"] = store
        worker_task: asyncio.Task[None] | None = None
        if resolved_settings.worker_enabled:
            worker = RunJobWorker(
                store=store,
                group_name=resolved_settings.worker_group_name,
                consumer_name=_worker_consumer_name(resolved_settings),
                pending_idle_ms=resolved_settings.worker_pending_idle_ms,
            )
            worker_task = asyncio.create_task(worker.run_forever(), name="dify-agent-run-worker")
            # Give the worker one loop turn so startup tests and immediate failures observe the task.
            await asyncio.sleep(0)
        try:
            yield
        finally:
            # Cancel the embedded worker before closing the Redis client it uses.
            if worker_task is not None:
                _ = worker_task.cancel()
                with suppress(asyncio.CancelledError):
                    await worker_task
            await redis.aclose()

    app = FastAPI(title="Dify Agent Run Server", version="0.1.0", lifespan=lifespan)

    def get_store() -> RedisRunStore:
        # Raises KeyError if called before lifespan startup has run.
        return state["store"]

    app.include_router(create_runs_router(get_store))
    return app


# Module-level instance for `uvicorn dify_agent.server.app:app`.
app = create_app()
def _worker_consumer_name(settings: ServerSettings) -> str:
"""Return a stable-enough consumer name for this API process.
Redis consumer names should be unique per live process. The explicit setting
is useful for tests or controlled deployments; otherwise hostname and PID
distinguish common ``uvicorn --workers`` and reload processes.
"""
if settings.worker_consumer_name:
return settings.worker_consumer_name
return f"api-{socket.gethostname()}-{os.getpid()}"
__all__ = ["app", "create_app"]

View File

@ -0,0 +1,86 @@
"""FastAPI routes for asynchronous agent runs.
Controllers translate storage/validation errors into HTTP status codes and keep
worker execution out of the request path. A created run is only queued; clients
observe progress through polling or SSE replay.
"""
from collections.abc import Callable
from typing import Annotated
from fastapi import APIRouter, Depends, Header, HTTPException, Query
from fastapi.responses import StreamingResponse
from dify_agent.runtime.compositor_factory import build_pydantic_ai_compositor
from dify_agent.runtime.user_prompt_validation import EMPTY_USER_PROMPTS_ERROR, has_non_blank_user_prompt
from dify_agent.server.schemas import CreateRunRequest, CreateRunResponse, RunEventsResponse, RunStatusResponse
from dify_agent.server.sse import sse_event_stream
from dify_agent.storage.redis_run_store import RedisRunStore, RunNotFoundError
def create_runs_router(get_store: Callable[[], RedisRunStore]) -> APIRouter:
    """Create routes bound to the application's store dependency provider."""
    router = APIRouter(prefix="/runs", tags=["runs"])

    async def store_dep() -> RedisRunStore:
        # Indirection so the app can late-bind the store created in its lifespan.
        return get_store()

    @router.post("", response_model=CreateRunResponse, status_code=202)
    async def create_run(
        request: CreateRunRequest,
        store: Annotated[RedisRunStore, Depends(store_dep)],
    ) -> CreateRunResponse:
        # Build the compositor up front purely to validate the config before
        # queueing; the worker rebuilds it when the job actually executes.
        try:
            compositor = build_pydantic_ai_compositor(request.compositor)
        except Exception as exc:
            raise HTTPException(status_code=422, detail=str(exc)) from exc
        if not has_non_blank_user_prompt(compositor.user_prompts):
            raise HTTPException(status_code=422, detail=EMPTY_USER_PROMPTS_ERROR)
        record = await store.create_run(request)
        # 202: the run is durably queued, not yet executed.
        return CreateRunResponse(run_id=record.run_id, status=record.status)

    @router.get("/{run_id}", response_model=RunStatusResponse)
    async def get_run_status(run_id: str, store: Annotated[RedisRunStore, Depends(store_dep)]) -> RunStatusResponse:
        try:
            record = await store.get_run(run_id)
        except RunNotFoundError as exc:
            raise HTTPException(status_code=404, detail="run not found") from exc
        return RunStatusResponse(
            run_id=record.run_id,
            status=record.status,
            created_at=record.created_at,
            updated_at=record.updated_at,
            error=record.error,
        )

    @router.get("/{run_id}/events", response_model=RunEventsResponse)
    async def get_run_events(
        run_id: str,
        store: Annotated[RedisRunStore, Depends(store_dep)],
        after: str = Query(default="0-0"),
        limit: int = Query(default=100, ge=1, le=500),
    ) -> RunEventsResponse:
        # `after` is an opaque stream-id cursor; "0-0" replays from the start.
        try:
            return await store.get_events(run_id, after=after, limit=limit)
        except RunNotFoundError as exc:
            raise HTTPException(status_code=404, detail="run not found") from exc

    @router.get("/{run_id}/events/sse")
    async def stream_run_events(
        run_id: str,
        store: Annotated[RedisRunStore, Depends(store_dep)],
        last_event_id: Annotated[str | None, Header(alias="Last-Event-ID")] = None,
        after: str | None = Query(default=None),
    ) -> StreamingResponse:
        # An explicit ?after= query wins over the browser reconnect header.
        cursor = after or last_event_id or "0-0"
        try:
            # Check existence first so a missing run 404s before streaming starts.
            _ = await store.get_run(run_id)
            events = store.iter_events(run_id, after=cursor)
            return StreamingResponse(sse_event_stream(events), media_type="text/event-stream")
        except RunNotFoundError as exc:
            raise HTTPException(status_code=404, detail="run not found") from exc

    return router

View File

@ -0,0 +1,147 @@
"""Public API schemas for the Dify Agent run server.
The server accepts only registry-backed Agenton compositor configs. This keeps
HTTP input data-only and prevents unsafe import-path construction. Run events are
append-only records; Redis stream ids (or in-memory equivalents in tests) are the
public cursors used by polling and SSE replay.
"""
from datetime import datetime, timezone
from typing import Literal
from uuid import uuid4
from pydantic import BaseModel, ConfigDict, Field, JsonValue, field_validator
from agenton.compositor import CompositorConfig, CompositorSessionSnapshot
# Lifecycle states persisted on RunRecord: "queued" -> "running" -> terminal.
RunStatus = Literal["queued", "running", "succeeded", "failed"]
# Public event names emitted by the runner; run_started comes first and
# exactly one of run_succeeded/run_failed terminates the stream.
RunEventType = Literal[
    "run_started",
    "pydantic_ai_event",
    "agent_output",
    "session_snapshot",
    "run_succeeded",
    "run_failed",
]
def new_run_id() -> str:
    """Return a stable external run id (a random UUID4 in canonical text form)."""
    return f"{uuid4()}"
def utc_now() -> datetime:
    """Return the timestamp format used by public schemas: timezone-aware UTC."""
    return datetime.now(tz=timezone.utc)
class AgentProfileConfig(BaseModel):
    """Minimal model profile for the MVP runner.

    ``test`` uses pydantic-ai's ``TestModel`` and is credential-free. Other
    profiles can be added behind this schema without changing run/event storage.
    """

    # Only "test" is accepted today; create_agent() rejects anything else.
    provider: Literal["test"] = "test"
    # Canned text the TestModel returns as the agent output.
    output_text: str = "Hello from the Dify Agent test model."
    model_config = ConfigDict(extra="forbid")
class CreateRunRequest(BaseModel):
    """Request body for creating one async agent run."""

    # Registry-backed Agenton compositor config (data-only, validated on create).
    compositor: CompositorConfig
    # Optional prior session to resume; None starts a fresh session.
    session_snapshot: CompositorSessionSnapshot | None = None
    # Defaults to the credential-free "test" profile.
    agent_profile: AgentProfileConfig = Field(default_factory=AgentProfileConfig)
    model_config = ConfigDict(extra="forbid")
class CreateRunResponse(BaseModel):
    """Response returned after a run job has been durably queued."""

    run_id: str
    # "queued" at creation time; poll /runs/{run_id} for transitions.
    status: RunStatus
    model_config = ConfigDict(extra="forbid")
class RunStatusResponse(BaseModel):
    """Current server-side status for one run."""

    run_id: str
    status: RunStatus
    created_at: datetime
    updated_at: datetime
    # Failure message; populated by the runner only on the failed path.
    error: str | None = None
    model_config = ConfigDict(extra="forbid")
class RunEvent(BaseModel):
    """Append-only event visible through polling and SSE."""

    # Public cursor id (Redis stream id); None until assigned by a sink.
    id: str | None = None
    run_id: str
    type: RunEventType
    # JSON payload; its shape depends on `type` (e.g. {"output": ...} for agent_output).
    data: JsonValue = Field(default_factory=dict)
    created_at: datetime = Field(default_factory=utc_now)
    model_config = ConfigDict(extra="forbid")
class RunEventsResponse(BaseModel):
    """Cursor-paginated event log response."""

    run_id: str
    events: list[RunEvent]
    # Pass back as `after` to continue; the store echoes the request cursor
    # when the page is empty.
    next_cursor: str | None = None
    model_config = ConfigDict(extra="forbid")
class RunnerJob(BaseModel):
    """Durable worker payload stored in Redis streams."""

    run_id: str
    # Full original request so the worker can rebuild the compositor and agent.
    request: CreateRunRequest
    model_config = ConfigDict(extra="forbid")
class RunRecord(BaseModel):
    """Internal representation persisted for status reads."""

    run_id: str
    status: RunStatus
    request: CreateRunRequest
    created_at: datetime = Field(default_factory=utc_now)
    updated_at: datetime = Field(default_factory=utc_now)
    # Failure message carried alongside a "failed" status.
    error: str | None = None
    model_config = ConfigDict(extra="forbid")

    @field_validator("updated_at")
    @classmethod
    def updated_at_must_be_timezone_aware(cls, value: datetime) -> datetime:
        """Reject naive timestamps before they become JSON API values."""
        # NOTE(review): created_at has no matching validator — confirm whether
        # it should be required to be timezone-aware as well.
        if value.tzinfo is None:
            raise ValueError("updated_at must be timezone-aware")
        return value
__all__ = [
"AgentProfileConfig",
"CreateRunRequest",
"CreateRunResponse",
"RunEvent",
"RunEventsResponse",
"RunRecord",
"RunStatus",
"RunStatusResponse",
"RunnerJob",
"new_run_id",
"utc_now",
]

View File

@ -0,0 +1,31 @@
"""Configuration for the FastAPI run server and embedded worker."""
from typing import ClassVar
from pydantic_settings import BaseSettings, SettingsConfigDict
class ServerSettings(BaseSettings):
    """Environment-backed settings shared by HTTP routes and the run worker.

    The default deployment mode runs the Redis Streams worker inside the FastAPI
    process so a single ``uvicorn`` command is enough for local development and
    small deployments. Set ``DIFY_AGENT_WORKER_ENABLED=false`` when running a
    separate worker process or when only the HTTP API should be started.
    """

    # Env: DIFY_AGENT_REDIS_URL
    redis_url: str = "redis://localhost:6379/0"
    # Env: DIFY_AGENT_REDIS_PREFIX — key namespace for records/streams.
    redis_prefix: str = "dify-agent"
    # Env: DIFY_AGENT_WORKER_ENABLED — run the Redis Streams worker in-process.
    worker_enabled: bool = True
    # Redis consumer-group name shared by all workers.
    worker_group_name: str = "run-workers"
    # None -> derived from hostname + PID at startup.
    worker_consumer_name: str | None = None
    # Idle time (ms) before a pending job can be reclaimed from a crashed consumer.
    worker_pending_idle_ms: int = 600_000

    model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
        env_prefix="DIFY_AGENT_",
        env_file=(".env", "dify-agent/.env"),
        extra="ignore",
    )
__all__ = ["ServerSettings"]

View File

@ -0,0 +1,29 @@
"""Server-sent event formatting for run event replay.
SSE frames use the run event id as ``id`` and the run event type as ``event`` so
browsers can resume with ``Last-Event-ID`` while clients can subscribe by event
name. Payload data is the full public ``RunEvent`` JSON object.
"""
from collections.abc import AsyncIterable, AsyncIterator
from dify_agent.server.schemas import RunEvent
def format_sse_event(event: RunEvent) -> str:
    """Serialize one event as an SSE frame."""
    # `event:` names the frame by run event type; `data:` carries the full
    # public RunEvent JSON. The `id:` field is emitted only once assigned.
    frame = [f"event: {event.type}", f"data: {event.model_dump_json()}"]
    if event.id is not None:
        frame.insert(0, f"id: {event.id}")
    return "\n".join(frame) + "\n\n"
async def sse_event_stream(events: AsyncIterable[RunEvent]) -> AsyncIterator[str]:
    """Yield formatted SSE frames, one per public run event."""
    async for run_event in events:
        # Delegate all framing rules to format_sse_event.
        yield format_sse_event(run_event)
__all__ = ["format_sse_event", "sse_event_stream"]

View File

@ -0,0 +1,23 @@
"""Redis key helpers for the run server.
Keys are centralized so workers, projectors, and HTTP routes can share the same
stream/hash layout without duplicating string formats.
"""
def run_record_key(prefix: str, run_id: str) -> str:
    """Return the Redis string key holding one serialized run record."""
    return ":".join((prefix, "runs", run_id, "record"))
def run_events_key(prefix: str, run_id: str) -> str:
    """Return the Redis stream key holding one run's event log."""
    return ":".join((prefix, "runs", run_id, "events"))
def run_jobs_key(prefix: str) -> str:
    """Return the Redis stream key holding queued run jobs."""
    return ":".join((prefix, "runs", "jobs"))
__all__ = ["run_events_key", "run_jobs_key", "run_record_key"]

View File

@ -0,0 +1,128 @@
"""Redis Streams-backed run persistence.
The store writes run records as JSON strings and events/jobs as Redis streams.
HTTP event cursors are Redis stream ids; ``0-0`` means replay from the beginning
for polling and SSE. The worker uses the jobs stream directly and updates the run
record through the same status/event sink protocol as tests.
"""
from collections.abc import AsyncIterator
from typing import cast
from pydantic import JsonValue
from redis.asyncio import Redis
from dify_agent.runtime.event_sink import RunEventSink
from dify_agent.server.schemas import (
CreateRunRequest,
RunEvent,
RunEventsResponse,
RunRecord,
RunStatus,
RunnerJob,
new_run_id,
utc_now,
)
from dify_agent.storage.redis_keys import run_events_key, run_jobs_key, run_record_key
class RunNotFoundError(LookupError):
    """Raised when a requested run record does not exist.

    Routes translate this to HTTP 404; subclassing ``LookupError`` keeps it
    distinct from connection/protocol errors raised by the Redis client.
    """
class RedisRunStore(RunEventSink):
    """Async Redis implementation for run records, jobs, and events."""

    # Shared async client; the app owns its lifecycle.
    redis: Redis
    # Key namespace prefix, e.g. "dify-agent".
    prefix: str

    def __init__(self, redis: Redis, *, prefix: str = "dify-agent") -> None:
        self.redis = redis
        self.prefix = prefix

    async def create_run(self, request: CreateRunRequest) -> RunRecord:
        """Persist a queued run and enqueue its worker job atomically.

        The run record and jobs stream entry are one durability boundary: either
        both are committed by Redis ``MULTI/EXEC`` or neither is visible. This
        prevents permanently queued records with no corresponding worker job.
        """
        run_id = new_run_id()
        record = RunRecord(run_id=run_id, status="queued", request=request)
        job = RunnerJob(run_id=run_id, request=request)
        async with self.redis.pipeline(transaction=True) as pipe:
            pipe.set(run_record_key(self.prefix, run_id), record.model_dump_json())
            pipe.xadd(run_jobs_key(self.prefix), {"payload": job.model_dump_json()})
            await pipe.execute()
        return record

    async def get_run(self, run_id: str) -> RunRecord:
        """Return one run record or raise ``RunNotFoundError``."""
        value = await self.redis.get(run_record_key(self.prefix, run_id))
        if value is None:
            raise RunNotFoundError(run_id)
        # Tolerate clients with and without decode_responses enabled.
        if isinstance(value, bytes):
            value = value.decode()
        return RunRecord.model_validate_json(value)

    async def update_status(self, run_id: str, status: RunStatus, error: str | None = None) -> None:
        """Update the status fields of an existing run record.

        NOTE(review): read-modify-write without WATCH — concurrent status
        writers for the same run could interleave; confirm a single worker
        owns each run at a time.
        """
        record = await self.get_run(run_id)
        updated = record.model_copy(update={"status": status, "updated_at": utc_now(), "error": error})
        await self.redis.set(run_record_key(self.prefix, run_id), updated.model_dump_json())

    async def append_event(self, event: RunEvent) -> str:
        """Append an event JSON payload to the run's Redis stream.

        The stream id assigned by XADD becomes the event's public cursor, so
        the stored payload excludes any pre-set ``id``.
        """
        event_id = await self.redis.xadd(
            run_events_key(self.prefix, event.run_id),
            {"payload": event.model_dump_json(exclude={"id"})},
        )
        return event_id.decode() if isinstance(event_id, bytes) else str(event_id)

    async def get_events(self, run_id: str, *, after: str = "0-0", limit: int = 100) -> RunEventsResponse:
        """Read a bounded page of events after ``after`` cursor."""
        # Existence check first so unknown runs 404 instead of returning [].
        await self.get_run(run_id)
        # "(" prefix makes the XRANGE start exclusive (Redis 6.2+).
        raw_events = await self.redis.xrange(run_events_key(self.prefix, run_id), min=f"({after}", count=limit)
        events = [self._decode_event(run_id, raw_id, fields) for raw_id, fields in raw_events]
        # Echo the request cursor when the page is empty so clients can keep polling.
        next_cursor = events[-1].id if events else after
        return RunEventsResponse(run_id=run_id, events=events, next_cursor=next_cursor)

    async def iter_events(self, run_id: str, *, after: str = "0-0") -> AsyncIterator[RunEvent]:
        """Yield replayed and future events for SSE clients."""
        await self.get_run(run_id)
        cursor = after
        # Phase 1: replay the existing backlog page by page.
        while True:
            page = await self.get_events(run_id, after=cursor, limit=100)
            for event in page.events:
                if event.id is not None:
                    cursor = event.id
                yield event
            if not page.events:
                break
        # Phase 2: block on XREAD for new events; a 30s timeout returns an
        # empty response and we simply block again.
        while True:
            response = await self.redis.xread({run_events_key(self.prefix, run_id): cursor}, block=30_000, count=100)
            if not response:
                continue
            for _stream_name, entries in response:
                for raw_id, fields in entries:
                    event = self._decode_event(run_id, raw_id, fields)
                    if event.id is not None:
                        cursor = event.id
                    yield event

    @staticmethod
    def _decode_event(run_id: str, raw_id: object, fields: dict[object, object]) -> RunEvent:
        """Decode one Redis stream entry into a public event."""
        # Field keys may be bytes or str depending on client decode settings.
        payload = fields.get(b"payload") or fields.get("payload")
        if isinstance(payload, bytes):
            payload = payload.decode()
        event_id = raw_id.decode() if isinstance(raw_id, bytes) else str(raw_id)
        # Stamp the authoritative stream id and run id onto the decoded event.
        return RunEvent.model_validate_json(cast(str, payload)).model_copy(update={"id": event_id, "run_id": run_id})
def json_field(value: object) -> JsonValue:
    """Typing-only narrowing helper for dynamic Redis payloads; returns ``value`` unchanged."""
    narrowed = cast(JsonValue, value)
    return narrowed
__all__ = ["RedisRunStore", "RunNotFoundError"]

View File

@ -0,0 +1,153 @@
"""Redis Streams worker for executing queued runs.
This worker is asyncio/uvloop compatible and intentionally does not use Celery.
It reads jobs from the shared Redis stream, executes them through
``AgentRunRunner``, and acknowledges entries only after terminal status/events
have been written.
"""
import asyncio
import logging
from collections.abc import Callable
from typing import Protocol, cast
from redis.asyncio import Redis
from dify_agent.runtime.runner import AgentRunRunner
from dify_agent.server.schemas import RunnerJob
from dify_agent.server.settings import ServerSettings
from dify_agent.storage.redis_keys import run_jobs_key
from dify_agent.storage.redis_run_store import RedisRunStore
logger = logging.getLogger(__name__)
class JobRunner(Protocol):
    """Executable unit for one decoded run job.

    ``AgentRunRunner`` satisfies this structurally; tests can substitute fakes
    through the worker's ``runner_factory``.
    """

    async def run(self) -> None:
        """Execute the job and write terminal status/events."""
        ...
# Factory signature the worker uses to build one runner per decoded job.
type JobRunnerFactory = Callable[[RunnerJob], JobRunner]


def create_default_job_runner(store: RedisRunStore, job: RunnerJob) -> JobRunner:
    """Create the production runner for a decoded Redis job.

    The store doubles as the runner's event sink (it implements RunEventSink).
    """
    return AgentRunRunner(sink=store, request=job.request, run_id=job.run_id)
class RunJobWorker:
    """Long-running worker that consumes the run jobs stream."""

    # Store providing both the Redis client and the event/status sink.
    store: RedisRunStore
    # Redis consumer-group name shared across worker processes.
    group_name: str
    # Unique-per-process consumer name within the group.
    consumer_name: str
    # Idle threshold (ms) before another consumer may claim a pending entry.
    pending_idle_ms: int
    # Builds one JobRunner per decoded job; overridable for tests.
    runner_factory: JobRunnerFactory

    def __init__(
        self,
        *,
        store: RedisRunStore,
        group_name: str = "run-workers",
        consumer_name: str = "worker-1",
        pending_idle_ms: int = 60_000,
        runner_factory: JobRunnerFactory | None = None,
    ) -> None:
        self.store = store
        self.group_name = group_name
        self.consumer_name = consumer_name
        self.pending_idle_ms = pending_idle_ms
        self.runner_factory = runner_factory or (lambda job: create_default_job_runner(store, job))

    async def run_forever(self) -> None:
        """Continuously read and execute jobs until cancelled."""
        jobs_key = run_jobs_key(self.store.prefix)
        await self._ensure_group(jobs_key)
        while True:
            await self.process_once(jobs_key, block_ms=30_000)

    async def process_once(self, jobs_key: str | None = None, *, block_ms: int = 30_000) -> bool:
        """Process one stale pending or new job entry.

        Stale pending entries are reclaimed before blocking on new work. This
        covers worker crashes after ``XREADGROUP`` delivery but before ``XACK``:
        Redis keeps the entry pending, and another worker can claim it after the
        configured idle timeout instead of leaving the run stuck forever.
        """
        resolved_jobs_key = jobs_key or run_jobs_key(self.store.prefix)
        claimed = await self._claim_stale_pending(resolved_jobs_key)
        if claimed:
            for entry_id, fields in claimed:
                await self._handle_entry(resolved_jobs_key, entry_id, fields)
            return True
        # ">" requests only entries never delivered to this group before.
        response = await self.store.redis.xreadgroup(
            self.group_name,
            self.consumer_name,
            {resolved_jobs_key: ">"},
            count=1,
            block=block_ms,
        )
        for _stream_name, entries in response:
            for entry_id, fields in entries:
                await self._handle_entry(resolved_jobs_key, entry_id, fields)
            return True
        # Blocking read timed out with no work available.
        return False

    async def _claim_stale_pending(self, jobs_key: str) -> list[tuple[object, dict[object, object]]]:
        """Claim stale pending jobs from crashed consumers."""
        response = await self.store.redis.xautoclaim(
            jobs_key,
            self.group_name,
            self.consumer_name,
            min_idle_time=self.pending_idle_ms,
            start_id="0-0",
            count=1,
        )
        # redis-py returns (next_start_id, entries[, deleted_ids]); entries is index 1.
        if len(response) >= 2:
            entries = response[1]
            return list(entries)
        return []

    async def _ensure_group(self, jobs_key: str) -> None:
        """Create the Redis consumer group if needed."""
        try:
            await self.store.redis.xgroup_create(jobs_key, self.group_name, id="0", mkstream=True)
        except Exception as exc:
            # BUSYGROUP means the group already exists — expected on restart.
            if "BUSYGROUP" not in str(exc):
                raise
async def _handle_entry(self, jobs_key: str, entry_id: object, fields: dict[object, object]) -> None:
"""Decode and execute one stream entry."""
payload = fields.get(b"payload") or fields.get("payload")
if isinstance(payload, bytes):
payload = payload.decode()
if not isinstance(payload, str | bytes | bytearray):
raise ValueError("Redis job payload must be JSON text")
job = RunnerJob.model_validate_json(payload)
try:
await self.runner_factory(job).run()
except Exception:
logger.exception("run worker failed", extra={"run_id": job.run_id})
finally:
await self.store.redis.xack(jobs_key, self.group_name, cast(str | bytes, entry_id))
async def main() -> None:
    """Entry point: wire up dependencies from environment settings and run the worker."""
    settings = ServerSettings()
    client = Redis.from_url(settings.redis_url)
    try:
        store = RedisRunStore(client, prefix=settings.redis_prefix)
        worker = RunJobWorker(store=store)
        await worker.run_forever()
    finally:
        # Always release the connection pool, even when the loop is cancelled.
        await client.aclose()
if __name__ == "__main__":
    # Run the worker standalone when the module is executed as a script.
    asyncio.run(main())
# Public API of this module.
__all__ = ["RunJobWorker", "main"]

View File

@ -0,0 +1,21 @@
"""Lightweight run-event projector service.
The MVP writes status directly from the runner/store, so this projector currently
acts as an async-compatible extension point for future derived views. Keeping the
module explicit documents that Redis Streams, not Celery, are the service
boundary for background processing.
"""
import asyncio
class RunProjector:
    """Placeholder projector: performs no work yet, but runs a cancellable loop."""

    async def run_forever(self) -> None:
        """Sleep in long intervals until the owning task is cancelled."""
        idle_seconds = 3600
        while True:
            await asyncio.sleep(idle_seconds)
__all__ = ["RunProjector"]

181
dify-agent/uv.lock generated
View File

@ -444,8 +444,10 @@ dependencies = [
{ name = "logfire" },
{ name = "pydantic" },
{ name = "pydantic-ai-slim", extra = ["anthropic", "google", "openai"] },
{ name = "pydantic-settings" },
{ name = "redis" },
{ name = "sqlmodel" },
{ name = "unicorn" },
{ name = "uvicorn", extra = ["standard"] },
{ name = "uvloop" },
]
@ -465,8 +467,10 @@ requires-dist = [
{ name = "logfire", specifier = ">=4.32.1" },
{ name = "pydantic", specifier = ">=2.13.3" },
{ name = "pydantic-ai-slim", extras = ["anthropic", "google", "openai"], specifier = ">=1.85.1" },
{ name = "pydantic-settings", specifier = ">=2.12.0" },
{ name = "redis", specifier = ">=5" },
{ name = "sqlmodel", specifier = ">=0.0.38" },
{ name = "unicorn", specifier = ">=2.1.4" },
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.38.0" },
{ name = "uvloop", specifier = ">=0.22.1" },
]
@ -772,6 +776,35 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
]
[[package]]
name = "httptools"
version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" },
{ url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" },
{ url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" },
{ url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" },
{ url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" },
{ url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" },
{ url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" },
{ url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" },
{ url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" },
{ url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" },
{ url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" },
{ url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" },
{ url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" },
{ url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" },
{ url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" },
{ url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" },
{ url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" },
{ url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" },
{ url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" },
{ url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" },
{ url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" },
]
[[package]]
name = "httpx"
version = "0.28.1"
@ -1965,6 +1998,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0f/49/71b66c79df6ffbf3a340a33602ce44873548f589548d5fb5d8873b870f05/pydantic_graph-1.85.1-py3-none-any.whl", hash = "sha256:515bee899bbfbf00911e32db941c69f2a72bc8fff56ea03a99fa10cd0fa5c436", size = 73066, upload-time = "2026-04-22T00:08:19.025Z" },
]
[[package]]
name = "pydantic-settings"
version = "2.14.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/98/c8345dccdc31de4228c039a98f6467a941e39558da41c1744fbe29fa5666/pydantic_settings-2.14.0.tar.gz", hash = "sha256:24285fd4b0e0c06507dd9fdfd331ee23794305352aaec8fc4eb92d4047aeb67d", size = 235709, upload-time = "2026-04-20T13:37:40.293Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/01/dd/bebff3040138f00ae8a102d426b27349b9a49acc310fcae7f92112d867e3/pydantic_settings-2.14.0-py3-none-any.whl", hash = "sha256:fc8d5d692eb7092e43c8647c1c35a3ecd00e040fcf02ed86f4cb5458ca62182e", size = 60940, upload-time = "2026-04-20T13:37:38.586Z" },
]
[[package]]
name = "pygments"
version = "2.20.0"
@ -2152,6 +2199,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d0/00/1e03a4989fa5795da308cd774f05b704ace555a70f9bf9d3be057b680bcf/python_docx-1.2.0-py3-none-any.whl", hash = "sha256:3fd478f3250fbbbfd3b94fe1e985955737c145627498896a8a6bf81f4baf66c7", size = 252987, upload-time = "2025-06-16T20:46:22.506Z" },
]
[[package]]
name = "python-dotenv"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
]
[[package]]
name = "python-iso639"
version = "2026.4.20"
@ -2317,6 +2373,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/70/a6/51fc1b0e61e3326e1c68a61cfd0c6b3c34c843681c4b1eefbf0596f59162/rapidfuzz-3.14.5-cp314-cp314t-win_arm64.whl", hash = "sha256:3e91dcd2549b8f8d843f98ba03a17e01f3d8b72ce942adbbb6761bc58ffce813", size = 855409, upload-time = "2026-04-07T11:16:15.787Z" },
]
[[package]]
name = "redis"
version = "7.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" },
]
[[package]]
name = "referencing"
version = "0.37.0"
@ -3026,24 +3091,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b0/70/d460bd685a170790ec89317e9bd33047988e4bce507b831f5db771e142de/tzdata-2026.1-py2.py3-none-any.whl", hash = "sha256:4b1d2be7ac37ceafd7327b961aa3a54e467efbdb563a23655fbfe0d39cfc42a9", size = 348952, upload-time = "2026-04-03T11:25:20.313Z" },
]
[[package]]
name = "unicorn"
version = "2.1.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b2/1b/b4248aa8422e86de690cf8e85cf8feae4c33405a097d1ebe71570bdaa6f5/unicorn-2.1.4.tar.gz", hash = "sha256:00567a70e323f749b419cd86bee4f9115beab7ebba32194581c090cbb7c59cff", size = 2900334, upload-time = "2025-09-09T17:10:48.026Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c8/a7/92b47771e2107a201632a199cec91e8a81ee8a071ca6b7e7d600d8c61ac9/unicorn-2.1.4-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2a6f738fab5fabffa56af1e7bbf16ea1e91466c342f8dc64f125bd70f36c6b80", size = 12958220, upload-time = "2025-09-09T17:10:04.86Z" },
{ url = "https://files.pythonhosted.org/packages/6c/ae/4943c6f8524d729ec7d5e69df6407ea05d710fe77471d91cecf3fc64eb57/unicorn-2.1.4-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:d6c93e0f60328d8f4a1792af3f834137a28050fcc2305f2ec01efe8558a9844e", size = 12142730, upload-time = "2025-09-09T17:10:07.48Z" },
{ url = "https://files.pythonhosted.org/packages/33/9f/32d41eb942221bcf4417cdc65537fc8b3bbbd6079d6c161e621f1dd4e94a/unicorn-2.1.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd1fb0c9af5f57e356d8a96928b4fe045b2e18f308ef23b481d5f970008aa722", size = 15372569, upload-time = "2025-09-09T17:10:09.765Z" },
{ url = "https://files.pythonhosted.org/packages/5f/7f/83161916dedff22ddb187bd2751150a1bc53c88b7c1a8d6fa1cc9e144c64/unicorn-2.1.4-cp37-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f4e63b76ac4faa7cd32a4c436e96eabc24b91d52e73bde7699ec886bddf9277", size = 19842875, upload-time = "2025-09-09T17:10:11.795Z" },
{ url = "https://files.pythonhosted.org/packages/e7/df/ded5e3684c2d7600b30cc8a7530277b8cb36644a1a9d34cade7ebb45604c/unicorn-2.1.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6e6dea140560de4ebd8446661f7ef84a357d428c14a3ef09dacd306ec8c239", size = 16436886, upload-time = "2025-09-09T17:10:14.079Z" },
{ url = "https://files.pythonhosted.org/packages/70/38/ba5a051c844026e59ab6e0017db8cec77dbe20ab5f1d6edae1ce9d885b06/unicorn-2.1.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:01d744ba01c5cc68f1d7afe3d183f1868720fd440ec4eaedc4d1d5d9bf54b84c", size = 16089431, upload-time = "2025-09-09T17:10:16.557Z" },
{ url = "https://files.pythonhosted.org/packages/35/07/0b2fb9d2a462066aa24b7d18b463300df79cc4eaa471379f8af7d216261c/unicorn-2.1.4-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:ce5c3bfd05f2a5749a0d8a960e1dfc26519a2d09377dc01cfa1378b936a953f8", size = 20531342, upload-time = "2025-09-09T17:10:19.008Z" },
{ url = "https://files.pythonhosted.org/packages/ed/4b/4628ccb20eb3ad1af400de8181d1f4e5c1a3fc2affa1b3410c1b2d71af36/unicorn-2.1.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d348a90ee90219d141cb115ef8ed7e3fd1af42afaee105f7580761d775b25e32", size = 16925029, upload-time = "2025-09-09T17:10:21.089Z" },
{ url = "https://files.pythonhosted.org/packages/08/23/6ae96f8efa8ede707cc67d18d76a08de498bd818483cf8211baf855eabb2/unicorn-2.1.4-cp37-abi3-win32.whl", hash = "sha256:9be89e69be5e2631299f39a7fb8e47b8d3bfaa70ae746e9c4c5f9476a2df9778", size = 11798091, upload-time = "2025-09-09T17:10:23.477Z" },
{ url = "https://files.pythonhosted.org/packages/70/3d/de7be9bd1addabe6d8a1369381f8a080400c349850e978689c5e18287957/unicorn-2.1.4-cp37-abi3-win_amd64.whl", hash = "sha256:d7107500c64ce5c168fbff6bef9485b5db1350050036f4cea568650cf8bdbdf5", size = 15943079, upload-time = "2025-09-09T17:10:25.265Z" },
]
[[package]]
name = "unstructured"
version = "0.21.5"
@ -3122,6 +3169,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
]
[[package]]
name = "uvicorn"
version = "0.46.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1f/93/041fca8274050e40e6791f267d82e0e2e27dd165627bd640d3e0e378d877/uvicorn-0.46.0.tar.gz", hash = "sha256:fb9da0926999cc6cb22dc7cd71a94a632f078e6ae47ff683c5c420750fb7413d", size = 88758, upload-time = "2026-04-23T07:16:00.151Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/31/a3/5b1562db76a5a488274b2332a97199b32d0442aca0ed193697fd47786316/uvicorn-0.46.0-py3-none-any.whl", hash = "sha256:bbebbcbed972d162afca128605223022bedd345b7bc7855ce66deb31487a9048", size = 70926, upload-time = "2026-04-23T07:15:58.355Z" },
]
[package.optional-dependencies]
standard = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "httptools" },
{ name = "python-dotenv" },
{ name = "pyyaml" },
{ name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
{ name = "watchfiles" },
{ name = "websockets" },
]
[[package]]
name = "uvloop"
version = "0.22.1"
@ -3166,6 +3237,76 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/06/7c/34330a89da55610daa5f245ddce5aab81244321101614751e7537f125133/wasabi-1.1.3-py3-none-any.whl", hash = "sha256:f76e16e8f7e79f8c4c8be49b4024ac725713ab10cd7f19350ad18a8e3f71728c", size = 27880, upload-time = "2024-05-31T16:56:16.699Z" },
]
[[package]]
name = "watchfiles"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" },
{ url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" },
{ url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" },
{ url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" },
{ url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" },
{ url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" },
{ url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" },
{ url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" },
{ url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" },
{ url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" },
{ url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" },
{ url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" },
{ url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" },
{ url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" },
{ url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" },
{ url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" },
{ url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" },
{ url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" },
{ url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" },
{ url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" },
{ url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" },
{ url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" },
{ url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" },
{ url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" },
{ url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" },
{ url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" },
{ url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" },
{ url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" },
{ url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" },
{ url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" },
{ url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" },
{ url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" },
{ url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" },
{ url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" },
{ url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" },
{ url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" },
{ url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" },
{ url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" },
{ url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" },
{ url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" },
{ url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" },
{ url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" },
{ url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" },
{ url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" },
{ url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" },
{ url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" },
{ url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" },
{ url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" },
{ url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" },
{ url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" },
{ url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" },
{ url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" },
{ url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" },
{ url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" },
{ url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" },
{ url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" },
{ url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" },
{ url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" },
]
[[package]]
name = "weasel"
version = "1.0.0"