-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: agent.py
More file actions
150 lines (122 loc) · 4.49 KB
/
agent.py
File metadata and controls
150 lines (122 loc) · 4.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
# AI Imports
from langchain.chat_models import init_chat_model
from langchain.agents import create_agent
from langchain.tools import tool
from langchain.messages import HumanMessage
from langsmith import Client
from tavily import TavilyClient
from langgraph.checkpoint.memory import InMemorySaver
from langchain_core.messages import AIMessageChunk
# Data Imports
from dotenv import load_dotenv
# UX Imports
from textual.app import App, ComposeResult
from textual.widgets import Header, Footer, Input, Markdown, Label
from textual.containers import ScrollableContainer, Vertical
# Misc
import asyncio
# Load environment variables from a local .env so the LangSmith/Tavily/OpenAI
# clients below can read their API keys from the environment.
load_dotenv()
# =================================== LLMOps =======================================
langsmith_client = Client()  # LangSmith: prompt registry / tracing
tavily_client = TavilyClient()  # Tavily: web search + extraction backend for the tools
llm = init_chat_model("gpt-4.1-mini")  # chat model driving the agent
checkpointer = InMemorySaver()  # in-memory conversation state, keyed by thread_id
# Pull the system prompt from LangSmith (prompt only, no bound model).
prompt = langsmith_client.pull_prompt("project_architecture_assistant", include_model=False)
# The pulled object's shape varies by prompt type, so probe for the template text.
# NOTE(review): assumes the first message holds the system prompt — confirm in LangSmith.
if hasattr(prompt, 'messages') and len(prompt.messages) > 0:
    msg = prompt.messages[0]
    if hasattr(msg, 'prompt'):
        # ChatPromptTemplate-style message: raw template lives at msg.prompt.template
        SYSTEM_PROMPT = msg.prompt.template
    elif hasattr(msg, 'content'):
        # Plain message object: use its content directly
        SYSTEM_PROMPT = msg.content
    else:
        # Unrecognized message shape — fall back to a generic prompt
        SYSTEM_PROMPT = "You are a helpful assistant."
else:
    # Prompt pulled without messages — fall back to a generic prompt
    SYSTEM_PROMPT = "You are a helpful assistant."
# =================================== TOOLS ========================================
@tool("tavily_search", description="Search the web for general information based on a search query string.")
def tavily_search(query: str):
    """Run a basic-depth Tavily web search for the agent.

    Args:
        query: Free-text search query.

    Returns:
        The raw Tavily API response (a dict) containing up to 5 results.
    """
    return tavily_client.search(query, search_depth="basic", max_results=5)
@tool("tavily_extract", description="Extract raw data/content from a specific URL. The input MUST be a valid URL.")
def tavily_extract(url: str):
    """Extract page content from a specific URL via Tavily.

    Args:
        url: The URL to extract content from.

    Returns:
        The Tavily extract response (a dict) with any "images" entries removed
        from each result, keeping the payload small for the LLM.
    """
    result = tavily_client.extract(url, include_images=False)
    # Belt and braces: strip any "images" key the API returns despite
    # include_images=False above.
    for r in result.get("results", []):
        r.pop("images", None)
    return result
# Tools exposed to the agent: web search + URL content extraction.
tools = [tavily_search, tavily_extract]
# =================================== AGENT ========================================
# Agent = model + tools + system prompt, with per-thread memory via the checkpointer.
agent = create_agent(model=llm, tools=tools, system_prompt=SYSTEM_PROMPT, checkpointer=checkpointer)
# =================================== APP ==========================================
class ProjectArchitect(App):
    """Textual TUI chat app that streams answers from the research agent.

    Layout: header, scrollable chat log, one-line status label, docked input,
    footer. Each submitted query is echoed as a Markdown widget and the
    agent's reply is streamed token-by-token into a second Markdown widget.
    """

    CSS = """
    Screen { background: #1a1a1a; }
    #chat-container {
        height: 1fr;
        overflow-y: auto;
        padding: 1 2;
    }
    #status {
        height: 1;
        padding: 0 2;
        color: yellow;
        display: none;
    }
    #status.visible {
        display: block;
    }
    Input {
        dock: bottom;
        margin: 1 2;
        border: tall cyan;
    }
    Markdown {
        margin: 1 0;
        padding: 1 2;
        border: round #333;
    }
    """
    TITLE = "🤖 Project Architect"

    def compose(self) -> ComposeResult:
        """Build the static widget tree: header, chat log, status, input, footer."""
        yield Header()
        yield ScrollableContainer(id="chat-container")
        yield Label("", id="status")
        yield Input(placeholder="Ask me anything... (Ctrl+C to exit)")
        yield Footer()

    async def on_input_submitted(self, event: Input.Submitted) -> None:
        """Handle a submitted chat message.

        Echoes the user's text, then streams the agent's reply into a
        Markdown widget, updating the status label to reflect whether the
        agent is thinking, researching (tool calls), or writing.

        Args:
            event: Textual Input.Submitted event carrying the typed text.
        """
        query = event.value.strip()
        if not query:
            return
        event.input.clear()
        event.input.disabled = True  # block re-entry while the agent runs
        container = self.query_one("#chat-container", ScrollableContainer)
        status = self.query_one("#status", Label)
        # User message. BUGFIX: Textual's Markdown widget takes no `color`
        # keyword — the original `color="green"` raised TypeError on first
        # submit; styling belongs in CSS.
        user_md = Markdown(f"**You:** {query}")
        await container.mount(user_md)
        # Placeholder widget the streamed response is written into.
        response_md = Markdown("")
        await container.mount(response_md)
        container.scroll_end(animate=False)
        text = ""
        # Single fixed thread id: all sessions share one conversation memory.
        config = {"configurable": {"thread_id": "1"}}
        messages = {"messages": [HumanMessage(content=query)]}
        status.add_class("visible")
        status.update("⏳ Thinking...")
        try:
            async for token, metadata in agent.astream(
                input=messages,
                config=config,
                stream_mode="messages"
            ):
                if isinstance(token, AIMessageChunk):
                    if token.tool_call_chunks:
                        status.update("🔍 Researching...")
                    elif token.content:
                        status.update("📝 Building analysis...")
                        text += token.content
                        await response_md.update(text)
                        container.scroll_end(animate=False)
        finally:
            # ROBUSTNESS: always restore the UI — the original left the input
            # disabled and the status stuck visible if the stream raised.
            status.remove_class("visible")
            event.input.disabled = False
            event.input.focus()
if __name__ == "__main__":
    # Launch the TUI only when executed as a script, not on import.
    ProjectArchitect().run()