1 change: 0 additions & 1 deletion apps/agent-gateway/api/__init__.py
@@ -3,4 +3,3 @@
Note: `apps/agent-gateway` is not a Python package (hyphenated path), so modules in this
folder are imported by adding the gateway root to `sys.path` in `app/main.py`.
"""

24 changes: 18 additions & 6 deletions apps/agent-gateway/api/stream.py
@@ -17,7 +17,9 @@
"X-Accel-Buffering": "no",
}

FrameType = Literal["intent", "thought", "code_delta", "trajectory", "delta", "final", "tool", "error"]
FrameType = Literal[
"intent", "thought", "code_delta", "trajectory", "delta", "final", "tool", "error"
]


def _truthy_env(name: str, default: str = "0") -> bool:
@@ -192,8 +194,12 @@ def _schedule_code_delta_persist(self, tool_event: Mapping[str, Any]) -> None:

payload = {
"name": str(tool_event.get("name") or ""),
"input": tool_event.get("input") if isinstance(tool_event.get("input"), Mapping) else {},
"output": tool_event.get("output") if isinstance(tool_event.get("output"), Mapping) else None,
"input": (
tool_event.get("input") if isinstance(tool_event.get("input"), Mapping) else {}
),
"output": (
tool_event.get("output") if isinstance(tool_event.get("output"), Mapping) else None
),
"status": str(tool_event.get("status") or "ok"),
}

@@ -223,7 +229,9 @@ def translate(self, event: Mapping[str, Any]) -> list[AGUIEvent]:
for kind, chunk in pieces:
if not chunk:
continue
out.append(self._thought_delta(chunk) if kind == "thought" else self._text_delta(chunk))
out.append(
self._thought_delta(chunk) if kind == "thought" else self._text_delta(chunk)
)
return out

if event_type == "final":
@@ -232,14 +240,18 @@ def translate(self, event: Mapping[str, Any]) -> list[AGUIEvent]:
metadata = data.get("metadata") if isinstance(data.get("metadata"), Mapping) else {}
else:
text = event.get("text", "")
metadata = event.get("metadata") if isinstance(event.get("metadata"), Mapping) else {}
metadata = (
event.get("metadata") if isinstance(event.get("metadata"), Mapping) else {}
)

flushed = self._splitter.flush()
out: list[AGUIEvent] = []
for kind, chunk in flushed:
if not chunk:
continue
out.append(self._thought_delta(chunk) if kind == "thought" else self._text_delta(chunk))
out.append(
self._thought_delta(chunk) if kind == "thought" else self._text_delta(chunk)
)

out.append(
AGUIEvent.final(
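
The first hunk in this file references a `_truthy_env(name: str, default: str = "0") -> bool` helper whose body is collapsed in this view. A typical implementation of such an environment-flag reader, given here as an assumption rather than the code actually in the PR, would be:

import os

def _truthy_env(name: str, default: str = "0") -> bool:
    # Treat the usual "on" spellings as True; everything else is False.
    # Assumed body -- the real implementation is not shown in this diff.
    return os.getenv(name, default).strip().lower() in {"1", "true", "yes", "on"}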
5 changes: 3 additions & 2 deletions apps/agent-gateway/app/api/legacy/router.py
@@ -65,8 +65,9 @@ def _vertex_enabled() -> bool:

async def get_tenant_context(
auth: AuthContext = Depends(get_auth_context),
x_vertice_org: str
| None = None, # Header dependency should be injected by caller or middleware
x_vertice_org: (
str | None
) = None, # Header dependency should be injected by caller or middleware
) -> TenantContext:
# Note: Header dependency injection happens at router level usually
return await resolve_tenant(_STORE, uid=auth.uid, org_id=x_vertice_org)
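
The comments in this hunk note that the `X-Vertice-Org` header dependency is expected to be injected by the caller, middleware, or router rather than declared here. For readers unfamiliar with the pattern, a sketch of how the header could instead be declared directly with FastAPI's `Header` (parameter names and surrounding calls are taken from the hunk; the wiring itself is illustrative, not what this PR does):

from fastapi import Depends, Header
# AuthContext, get_auth_context, TenantContext, resolve_tenant and _STORE as in the module above.

async def get_tenant_context(
    auth: AuthContext = Depends(get_auth_context),
    # Header(...) tells FastAPI to read the value from the X-Vertice-Org request header.
    x_vertice_org: str | None = Header(default=None, alias="X-Vertice-Org"),
) -> TenantContext:
    return await resolve_tenant(_STORE, uid=auth.uid, org_id=x_vertice_org)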
1 change: 1 addition & 0 deletions apps/agent-gateway/app/api/legacy/schemas.py
@@ -1,6 +1,7 @@
"""
Schemas for Legacy API routes.
"""

from typing import Optional
from pydantic import BaseModel

6 changes: 3 additions & 3 deletions apps/agent-gateway/app/nexus/alloydb/evolution.py
@@ -75,9 +75,9 @@ async def get_best_candidates(self, limit: int = 10) -> List[EvolutionaryCandida
ancestry=row["ancestry"] or [],
generation=row["generation"],
island_id=row["island_id"],
fitness_scores=json.loads(row["fitness_scores"])
if row["fitness_scores"]
else {},
fitness_scores=(
json.loads(row["fitness_scores"]) if row["fitness_scores"] else {}
),
evaluation_count=row["evaluation_count"],
)
for row in rows
8 changes: 5 additions & 3 deletions apps/agent-gateway/app/observability/feedback.py
@@ -77,9 +77,11 @@ def from_dict(cls, data: Dict[str, Any]) -> "FeedbackRecord":
feedback_type=FeedbackType(data.get("feedback_type", "neutral")),
score=data.get("score", 0),
comment=data.get("comment"),
timestamp=datetime.fromisoformat(data["timestamp"])
if "timestamp" in data
else datetime.now(timezone.utc),
timestamp=(
datetime.fromisoformat(data["timestamp"])
if "timestamp" in data
else datetime.now(timezone.utc)
),
metadata=data.get("metadata", {}),
)

6 changes: 3 additions & 3 deletions apps/agent-gateway/app/observability/tracing.py
@@ -131,9 +131,9 @@ def format(self, record: logging.LogRecord) -> str:
# Add trace context if available
if hasattr(record, "otelTraceID") and record.otelTraceID:
project_id = os.getenv("GOOGLE_CLOUD_PROJECT", "vertice-ai")
log_obj[
"logging.googleapis.com/trace"
] = f"projects/{project_id}/traces/{record.otelTraceID}"
log_obj["logging.googleapis.com/trace"] = (
f"projects/{project_id}/traces/{record.otelTraceID}"
)
if hasattr(record, "otelSpanID") and record.otelSpanID:
log_obj["logging.googleapis.com/spanId"] = record.otelSpanID
if hasattr(record, "otelTraceSampled"):
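
The `logging.googleapis.com/trace` key written in this hunk is the field Cloud Logging uses to correlate a structured log entry with Cloud Trace, and its value must have the form `projects/<project-id>/traces/<trace-id>`. As an illustration only (the IDs and other keys below are invented), a record emitted by a formatter like this would carry fields along these lines:

log_obj = {
    "message": "run completed",  # example payload, not from the PR
    "logging.googleapis.com/trace": "projects/vertice-ai/traces/4bf92f3577b34da6a3ce929d0e0e4736",
    "logging.googleapis.com/spanId": "00f067aa0ba902b7",
}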
32 changes: 20 additions & 12 deletions apps/agent-gateway/app/privacy/erasure.py
@@ -81,18 +81,26 @@ def from_dict(cls, data: Dict[str, Any]) -> "ErasureRequest":
user_id=data.get("user_id", ""),
org_id=data.get("org_id", ""),
status=ErasureStatus(data.get("status", "pending")),
requested_at=datetime.fromisoformat(data["requested_at"])
if "requested_at" in data
else datetime.now(timezone.utc),
soft_deleted_at=datetime.fromisoformat(data["soft_deleted_at"])
if data.get("soft_deleted_at")
else None,
hard_delete_scheduled=datetime.fromisoformat(data["hard_delete_scheduled"])
if data.get("hard_delete_scheduled")
else None,
hard_deleted_at=datetime.fromisoformat(data["hard_deleted_at"])
if data.get("hard_deleted_at")
else None,
requested_at=(
datetime.fromisoformat(data["requested_at"])
if "requested_at" in data
else datetime.now(timezone.utc)
),
soft_deleted_at=(
datetime.fromisoformat(data["soft_deleted_at"])
if data.get("soft_deleted_at")
else None
),
hard_delete_scheduled=(
datetime.fromisoformat(data["hard_delete_scheduled"])
if data.get("hard_delete_scheduled")
else None
),
hard_deleted_at=(
datetime.fromisoformat(data["hard_deleted_at"])
if data.get("hard_deleted_at")
else None
),
collections_affected=data.get("collections_affected", []),
documents_deleted=data.get("documents_deleted", 0),
error_message=data.get("error_message"),
30 changes: 10 additions & 20 deletions apps/agent-gateway/app/store/base.py
@@ -11,23 +11,17 @@


class Store(Protocol):
async def ensure_default_org(self, *, uid: str) -> Org:
...
async def ensure_default_org(self, *, uid: str) -> Org: ...

async def list_orgs(self, *, uid: str) -> list[Org]:
...
async def list_orgs(self, *, uid: str) -> list[Org]: ...

async def create_org(self, *, uid: str, name: str) -> Org:
...
async def create_org(self, *, uid: str, name: str) -> Org: ...

async def set_default_org(self, *, uid: str, org_id: str) -> None:
...
async def set_default_org(self, *, uid: str, org_id: str) -> None: ...

async def get_default_org_id(self, *, uid: str) -> str | None:
...
async def get_default_org_id(self, *, uid: str) -> str | None: ...

async def get_membership(self, *, uid: str, org_id: str) -> Membership | None:
...
async def get_membership(self, *, uid: str, org_id: str) -> Membership | None: ...

async def create_run(
self,
@@ -37,8 +31,7 @@ async def create_run(
session_id: str,
agent: str,
prompt: str,
) -> Run:
...
) -> Run: ...

async def update_run(
self,
@@ -47,11 +40,8 @@ async def update_run(
org_id: str,
status: str,
final_text: str | None = None,
) -> None:
...
) -> None: ...

async def list_runs(self, *, uid: str, org_id: str, limit: int = 50) -> list[Run]:
...
async def list_runs(self, *, uid: str, org_id: str, limit: int = 50) -> list[Run]: ...

async def get_run(self, *, uid: str, org_id: str, run_id: str) -> Run | None:
...
async def get_run(self, *, uid: str, org_id: str, run_id: str) -> Run | None: ...
4 changes: 2 additions & 2 deletions apps/agent-gateway/requirements.txt
@@ -1,7 +1,7 @@
fastapi>=0.109.0
uvicorn[standard]>=0.27.0
google-cloud-aiplatform>=1.38.0
google-genai>=0.3.0
google-cloud-aiplatform==1.130.0
google-genai==1.60.0
pydantic>=2.5.0
sse-starlette>=2.0.0
python-multipart>=0.0.9
2 changes: 1 addition & 1 deletion packages/vertice-core/pyproject.toml
@@ -13,7 +13,7 @@ authors = [
{ name = "Vertice-MAXIMUS", email = "gemini@vertice.ai" },
]
dependencies = [
"google-cloud-aiplatform>=1.38.0",
"google-cloud-aiplatform>=1.130.0",
"google-cloud-alloydb-connector>=1.0.0",
"pgvector>=0.2.0",
"pydantic>=2.5.0",
1 change: 1 addition & 0 deletions packages/vertice-core/src/agents/coder/__init__.py
@@ -1,4 +1,5 @@
"""Coder Agent - Fast code generation specialist."""

from .agent import CoderAgent, coder

__all__ = ["CoderAgent", "coder"]

@@ -68,9 +68,9 @@ def __init__(
self._iteration_count = 0
self._start_time: Optional[float] = None

self._on_state_change: Optional[
Callable[[OrchestratorState, OrchestratorState], None]
] = None
self._on_state_change: Optional[Callable[[OrchestratorState, OrchestratorState], None]] = (
None
)
self._on_step_complete: Optional[Callable[[ExecutionStep, ExecutionResult], None]] = None

def _transition_to(

4 changes: 2 additions & 2 deletions packages/vertice-core/src/vertice_core/adk/base.py
@@ -106,8 +106,8 @@ def get_requirements(self) -> List[str]:
"""
return [
"vertice-core",
"google-cloud-aiplatform==1.115.0",
"google-genai==1.2.0",
"google-cloud-aiplatform==1.130.0",
"google-genai==1.60.0",
"pydantic>=2.5.0",
]

@@ -68,9 +68,9 @@ def __init__(
self._iteration_count = 0
self._start_time: Optional[float] = None

self._on_state_change: Optional[
Callable[[OrchestratorState, OrchestratorState], None]
] = None
self._on_state_change: Optional[Callable[[OrchestratorState, OrchestratorState], None]] = (
None
)
self._on_step_complete: Optional[Callable[[ExecutionStep, ExecutionResult], None]] = None

def _transition_to(

@@ -2,7 +2,6 @@
Router Module - Semantic routing for agent selection.
"""


from .cache import RouterCacheMixin
from .router import SemanticRouter
from .similarity import SimilarityEngine

@@ -71,26 +71,23 @@ def __init__(self, provider: str, retry_after: Optional[int] = None) -> None:
class ProviderProtocol(Protocol):
"""Protocol for LLM providers."""

def is_available(self) -> bool:
...
def is_available(self) -> bool: ...

async def stream_chat(
self,
messages: List[Dict[str, str]],
max_tokens: int = DEFAULT_MAX_TOKENS,
temperature: float = DEFAULT_TEMPERATURE,
**kwargs: Any,
) -> AsyncIterator[str]:
...
) -> AsyncIterator[str]: ...

async def generate(
self,
messages: List[Dict[str, str]],
max_tokens: int = DEFAULT_MAX_TOKENS,
temperature: float = DEFAULT_TEMPERATURE,
**kwargs: Any,
) -> str:
...
) -> str: ...


@dataclass
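
Because `ProviderProtocol` is a `typing.Protocol`, concrete providers do not inherit from it; they only need methods of the same shape. A hypothetical minimal sketch of such a provider (class name, defaults, and behaviour below are invented for illustration, not part of the PR):

from typing import Any, AsyncIterator, Dict, List

class StaticProvider:
    """Illustration only: returns canned text instead of calling an LLM."""

    def __init__(self, reply: str = "ok") -> None:
        self._reply = reply

    def is_available(self) -> bool:
        return True

    async def stream_chat(
        self,
        messages: List[Dict[str, str]],
        max_tokens: int = 4096,    # stand-ins for DEFAULT_MAX_TOKENS /
        temperature: float = 0.7,  # DEFAULT_TEMPERATURE used in the module above
        **kwargs: Any,
    ) -> AsyncIterator[str]:
        # Written as an async generator, the common way to produce a stream of
        # text chunks that callers consume with `async for`.
        for word in self._reply.split():
            yield word + " "

    async def generate(
        self,
        messages: List[Dict[str, str]],
        max_tokens: int = 4096,
        temperature: float = 0.7,
        **kwargs: Any,
    ) -> str:
        return self._reply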

@@ -14,18 +14,15 @@
class MemorySubsystem(Protocol):
"""Protocol for memory subsystems that support search."""

def search(self, query: str, limit: int = 5) -> Coroutine[Any, Any, List[Any]]:
...
def search(self, query: str, limit: int = 5) -> Coroutine[Any, Any, List[Any]]: ...


class CoreMemoryProtocol(Protocol):
"""Protocol for core memory."""

def get_persona(self) -> Dict[str, Any]:
...
def get_persona(self) -> Dict[str, Any]: ...

def to_context_string(self) -> str:
...
def to_context_string(self) -> str: ...


class ActiveRetrieval:
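
As with the provider protocol above, `MemorySubsystem` and `CoreMemoryProtocol` are structural. Note that `search` is declared as a plain `def` returning a `Coroutine`, which is exactly what calling an `async def search(...) -> List[Any]` method produces. Hypothetical stand-ins that would satisfy both protocols (all names and data below are illustrative, not from the PR):

from typing import Any, Dict, List

class InMemoryCore:
    # Satisfies CoreMemoryProtocol structurally; no inheritance needed.
    def __init__(self) -> None:
        self._persona: Dict[str, Any] = {"name": "demo-agent", "tone": "concise"}

    def get_persona(self) -> Dict[str, Any]:
        return dict(self._persona)

    def to_context_string(self) -> str:
        return "\n".join(f"{key}: {value}" for key, value in self._persona.items())


class KeywordMemory:
    # Satisfies MemorySubsystem: calling the async method returns a coroutine
    # that resolves to a list of matches.
    def __init__(self, items: List[str]) -> None:
        self._items = items

    async def search(self, query: str, limit: int = 5) -> List[Any]:
        return [item for item in self._items if query.lower() in item.lower()][:limit]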