diff --git a/effectful/handlers/llm/agent.py b/docs/source/agent.py similarity index 57% rename from effectful/handlers/llm/agent.py rename to docs/source/agent.py index f80cf159..3a94e47d 100644 --- a/effectful/handlers/llm/agent.py +++ b/docs/source/agent.py @@ -1,34 +1,23 @@ import functools -from typing import Optional from effectful.handlers.llm import Template -from effectful.handlers.llm.providers import compute_response, format_model_input +from effectful.handlers.llm.providers import ( + LiteLLMProvider, + compute_response, + format_model_input, +) from effectful.ops.semantics import fwd, handler from effectful.ops.syntax import defop +from effectful.ops.types import NotHandled class Agent: - '''When inheriting from Agent, Template-valued methods will have the - previous history of the conversation injected prior to their prompts. - - Example: - - >>> class ConversationAgent(Agent): - ... @Template.define - ... def respond(self, message: str) -> str: - ... """Continue the conversation in response to the message '{message}'""" - ... raise NotImplementedError - - Any calls to `agent.format` will have the previous conversation history in their context. 
- - ''' - def __init__(self): - self.state = [] + self.state = [] # persist the list of messages @defop @staticmethod - def current_agent() -> Optional["Agent"]: + def current_agent() -> "Agent | None": return None def __init_subclass__(cls): @@ -51,14 +40,33 @@ def wrapper(self, *args, **kwargs): setattr(cls, method_name, wrapper) def _format_model_input(self, template, other, *args, **kwargs): + # update prompt with previous list of messages prompt = fwd() if Agent.current_agent() is self: assert self is other - prompt = self.state + prompt + self.state.extend(prompt) + prompt = self.state return prompt def _compute_response(self, *args, **kwargs): + # save response into persisted state response = fwd() if Agent.current_agent() is self: - self.state += response.output + self.state.append(response.choices[0].message.model_dump()) return response + + +if __name__ == "__main__": + + class ChatBot(Agent): + @Template.define + def send(self, user_input: str) -> str: + """User writes: {user_input}""" + raise NotHandled + + provider = LiteLLMProvider() + chatbot = ChatBot() + + with handler(provider): + print(chatbot.send("Hi!, how are you? I am in france.")) + print(chatbot.send("Remind me again, where am I?")) diff --git a/docs/source/agent_example.rst b/docs/source/agent_example.rst new file mode 100644 index 00000000..a9993c56 --- /dev/null +++ b/docs/source/agent_example.rst @@ -0,0 +1,18 @@ +Contextual LLM Agents +====================== +Here we give an example of using effectful to implement chatbot-style context-aware LLM agents. + +In the code below, we define a helper class :class:`Agent` which wraps its +subclasses' template operations in a wrapper that stores and persists +the history of prior interactions with the LLM: + - :func:`_format_model_input` prepends the stored message history to every + prompt sent to the LLM and records the new prompt messages in the state. 
+ - :func:`_compute_response` wraps the response from the LLM provider and + stashes the returned message into the state. + +Using this we can construct an agent which remembers the context of +the conversation: + +.. literalinclude:: ./agent.py + :language: python + diff --git a/docs/source/index.rst b/docs/source/index.rst index 92aa0207..626c0a56 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -17,6 +17,7 @@ Table of Contents lambda_example semi_ring_example beam_search_example + agent_example .. toctree:: :maxdepth: 2