Use gpt-5.1-codex-mini as default model in Ralph loop recipes

Author: Anthony Shaw
Date: 2026-02-11 13:31:05 -08:00
parent 0e616701a5
commit 3eb7efc990
9 changed files with 641 additions and 12 deletions


@@ -59,7 +59,7 @@ async def ralph_loop(prompt_file: str, max_iterations: int = 50):
         # Fresh session each iteration — context isolation is the point
         session = await client.create_session(
-            SessionConfig(model="claude-sonnet-4.5")
+            SessionConfig(model="gpt-5.1-codex-mini")
         )
         try:
             await session.send_and_wait(
@@ -109,7 +109,7 @@ async def ralph_loop(mode: str = "build", max_iterations: int = 50):
         print(f"\n=== Iteration {i}/{max_iterations} ===")
         session = await client.create_session(SessionConfig(
-            model="claude-sonnet-4.5",
+            model="gpt-5.1-codex-mini",
             # Pin the agent to the project directory
             working_directory=str(Path.cwd()),
             # Auto-approve tool calls for unattended operation
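
For context, the loop these hunks modify creates a fresh session for every iteration and tears it down before the next pass. The sketch below reconstructs that shape from the context lines above; the client object, the SessionConfig import, the prompt handling, and the cleanup call are not part of this diff and are assumptions.

from pathlib import Path

# Assumption: the SDK used by these recipes provides `SessionConfig` and an async
# `client` with `create_session`; the import and client setup are not shown in this diff.

async def ralph_loop(prompt_file: str, max_iterations: int = 50):
    prompt = Path(prompt_file).read_text()
    for i in range(1, max_iterations + 1):
        # Fresh session each iteration — context isolation is the point
        session = await client.create_session(
            SessionConfig(model="gpt-5.1-codex-mini")
        )
        try:
            # Same prompt every pass; send_and_wait blocks until the agent finishes.
            await session.send_and_wait(prompt)
        finally:
            # Assumption: the session exposes a close/cleanup call; the hunk only shows `try:`.
            await session.close()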


@@ -44,7 +44,7 @@ async def ralph_loop(mode: str = "build", max_iterations: int = 50):
         print(f"\n=== Iteration {i}/{max_iterations} ===")
         session = await client.create_session(SessionConfig(
-            model="claude-sonnet-4.5",
+            model="gpt-5.1-codex-mini",
             # Pin the agent to the project directory
             working_directory=str(Path.cwd()),
             # Auto-approve tool calls for unattended operation
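
Both recipe variants are plain async functions (their signatures appear in the hunk headers above), so a script would presumably drive them through asyncio. A minimal, hypothetical entry point, not part of this commit, might look like:

import asyncio

# Hypothetical wiring; the actual recipes may drive the loop differently.
if __name__ == "__main__":
    asyncio.run(ralph_loop(mode="build", max_iterations=50))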