Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 9 additions & 4 deletions src/agent/loop.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,17 @@
import logging

from ..config.settings import Settings
from ..llm.base import ResourceExhaustedError
from ..store.factory import create_checkpointer
from .graph import build_graph

logger = logging.getLogger(__name__)

# Per-1M-token pricing in USD: (input, output).
# Keys match the provider identifiers used elsewhere in the agent config.
_PRICING: dict[str, tuple[float, float]] = {
    "anthropic": (3.0, 15.0),  # Claude Sonnet
    "gemini": (1.25, 10.0),    # Gemini 2.5 Pro
    "codex": (2.50, 10.0),     # Codex
}


Expand Down Expand Up @@ -87,7 +88,11 @@ async def run(self) -> str:
"repo_url": self.repo_config.url,
}

final_state = await graph.ainvoke(initial_state, config=config)
try:
final_state = await graph.ainvoke(initial_state, config=config)
except ResourceExhaustedError as e:
logger.error("LLM resource exhausted. Aborting run: %s", e)
return "Agent stopped due to LLM resource exhaustion."

token_usage = final_state.get("token_usage", {})
input_tokens = token_usage.get("input_tokens", 0)
Expand Down
9 changes: 8 additions & 1 deletion src/llm/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
from langchain_core.output_parsers import BaseOutputParser


class ResourceExhaustedError(Exception):
    """Signals that an LLM response reported a resource-exhaustion condition."""


class RobustJsonOutputParser(BaseOutputParser[dict[str, Any]]):
"""Extract JSON from LLM responses, handling markdown fences and surrounding text.

Expand All @@ -33,6 +37,9 @@ def parse(self, text: str | list) -> dict[str, Any]:
parts.append(item.get("text", ""))
text = "".join(parts)

if "RESOURCE_EXHAUSTED" in text:
raise ResourceExhaustedError("LLM response indicates resource exhaustion")

text = text.strip()

# Try direct parse first (fast path for well-behaved responses)
Expand All @@ -51,7 +58,7 @@ def parse(self, text: str | list) -> dict[str, Any]:
continue

# Brute-force: find every '{' and try json.loads from it, longest match first.
for m in re.finditer(r"\{", text):
for m in re.finditer(r"{", text):
start = m.start()
end = text.rfind("}", start)
while end > start:
Expand Down
11 changes: 11 additions & 0 deletions tests/test_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,10 @@

from pathlib import Path

import pytest

from src.agent.executor import list_files, read_file, run_command
from src.llm.base import ResourceExhaustedError, RobustJsonOutputParser


def test_run_command_success(tmp_path: Path):
Expand Down Expand Up @@ -40,3 +43,11 @@ def test_read_file(tmp_path: Path):
def test_read_file_missing(tmp_path: Path):
    """Reading a nonexistent file reports an error string rather than raising."""
    missing = tmp_path / "nope.txt"
    assert "Error" in read_file(missing)


def test_robust_json_parser_resource_exhausted():
    """Parser must raise ResourceExhaustedError when the marker text is present."""
    text = "Something went wrong. RESOURCE_EXHAUSTED. Please try again."
    with pytest.raises(ResourceExhaustedError):
        RobustJsonOutputParser().parse(text)