Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 29 additions & 21 deletions effectful/handlers/llm/agent.py → docs/source/agent.py
Original file line number Diff line number Diff line change
@@ -1,34 +1,23 @@
import functools
from typing import Optional

from effectful.handlers.llm import Template
from effectful.handlers.llm.providers import compute_response, format_model_input
from effectful.handlers.llm.providers import (
LiteLLMProvider,
compute_response,
format_model_input,
)
from effectful.ops.semantics import fwd, handler
from effectful.ops.syntax import defop
from effectful.ops.types import NotHandled


class Agent:
'''When inheriting from Agent, Template-valued methods will have the
previous history of the conversation injected prior to their prompts.

Example:

>>> class ConversationAgent(Agent):
... @Template.define
... def respond(self, message: str) -> str:
... """Continue the conversation in response to the message '{message}'"""
... raise NotImplementedError

Any calls to `agent.format` will have the previous conversation history in their context.

'''

def __init__(self):
self.state = []
self.state = [] # persist the list of messages

@defop
@staticmethod
def current_agent() -> Optional["Agent"]:
def current_agent() -> "Agent | None":
return None

def __init_subclass__(cls):
Expand All @@ -51,14 +40,33 @@ def wrapper(self, *args, **kwargs):
setattr(cls, method_name, wrapper)

def _format_model_input(self, template, other, *args, **kwargs):
# update prompt with previous list of messages
prompt = fwd()
if Agent.current_agent() is self:
assert self is other
prompt = self.state + prompt
self.state.extend(prompt)
prompt = self.state
return prompt

def _compute_response(self, *args, **kwargs):
# save response into persisted state
response = fwd()
if Agent.current_agent() is self:
self.state += response.output
self.state.append(response.choices[0].message.model_dump())
return response


if __name__ == "__main__":

class ChatBot(Agent):
@Template.define
def send(self, user_input: str) -> str:
"""User writes: {user_input}"""
raise NotHandled

provider = LiteLLMProvider()
chatbot = ChatBot()

with handler(provider):
print(chatbot.send("Hi!, how are you? I am in france."))
print(chatbot.send("Remind me again, where am I?"))
18 changes: 18 additions & 0 deletions docs/source/agent_example.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
Contextual LLM Agents
======================
Here we give an example of using effectful to implement chatbot-style context-aware LLM agents.

In the code below, we define a helper class :class:`Agent` whose
subclasses' template operations are wrapped so that the history of
prior interactions with the LLM is stored and replayed:

- :func:`_format_model_input` prepends the stored conversation history
  to each prompt sent to the LLM and records the new prompt messages in
  the agent's state.
- :func:`_compute_response` intercepts each response from the LLM
  provider and appends the returned message to the state.

Using this helper, we can construct an agent that remembers the context
of the conversation across calls:

.. literalinclude:: ./agent.py
:language: python

1 change: 1 addition & 0 deletions docs/source/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ Table of Contents
lambda_example
semi_ring_example
beam_search_example
agent_example

.. toctree::
:maxdepth: 2
Expand Down