diff --git a/examples/.env.example b/examples/.env.example
index 0399aa4..9dfc3dc 100644
--- a/examples/.env.example
+++ b/examples/.env.example
@@ -3,4 +3,5 @@
 TOOLHOUSE_API_KEY=
 OPENAI_API_KEY=
 ANTHROPIC_API_KEY=
-GROQCLOUD_API_KEY=
\ No newline at end of file
+GROQCLOUD_API_KEY=
+SAMBANOVA_API_KEY=
diff --git a/examples/sample_run_local_tools_sambanova.py b/examples/sample_run_local_tools_sambanova.py
new file mode 100644
index 0000000..b244ed7
--- /dev/null
+++ b/examples/sample_run_local_tools_sambanova.py
@@ -0,0 +1,67 @@
+"""Sambanova Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+
+local_tools = [
+    {'type': 'function',
+     'function':
+        {
+            'name': 'hello',
+            'description': 'The user receives a customized hello message from a city and returns it to the user.',
+            'parameters': {
+                'type': 'object',
+                'properties': {
+                    'city': {'type': 'string', 'description': 'The city where you are from'}
+                },
+                'required': ['city']
+            }}}]
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+th.set_metadata("id", "fabio")
+th.set_metadata("timezone", 5)
+
+
+@th.register_local_tool("hello")
+def hello_tool(city: str):
+    """Return a Hello message from a specific city."""
+    return f"Hello from {city}!!!"
+
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/",
+)
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Can I get a hello from Rome?"
+    }]
+
+response = client.chat.completions.create(
+    model='Meta-Llama-3.1-405B-Instruct',
+    messages=messages,
+    tools=th.get_tools() + local_tools  # Retrieve tools installed from Toolhouse
+)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(response)
+
+response = client.chat.completions.create(
+    model='Meta-Llama-3.1-405B-Instruct',
+    messages=messages,
+    tools=th.get_tools() + local_tools  # Retrieve tools installed from Toolhouse
+    )
+
+print(response.choices[0].message.content)
diff --git a/examples/sample_run_local_tools_sambanova_stream.py b/examples/sample_run_local_tools_sambanova_stream.py
new file mode 100644
index 0000000..82ca192
--- /dev/null
+++ b/examples/sample_run_local_tools_sambanova_stream.py
@@ -0,0 +1,65 @@
+"""SambaNova Sample"""
+
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse  # Import the Toolhouse SDK
+from toolhouse.models.Stream import ToolhouseStreamStorage  # Import the Toolhouse Stream Storage
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/",
+)
+
+local_tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "hello",
+            "description": "The user receives a customized hello message from a city and returns it to the user.",
+            "parameters": {
+                "type": "object",
+                "properties": {"city": {"type": "string", "description": "The city where you are from"}},
+                "required": ["city"],
+            },
+        },
+    }
+]
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+th.set_metadata("id", "fabio")
+th.set_metadata("timezone", 5)
+
+
+@th.register_local_tool("hello")
+def hello_tool(city: str):
+    """Return a Hello message from a specific city."""
+    return f"Hello from {city}!!!"
+
+
+messages: List = [{"role": "user", "content": "Can I get a hello from Rome?"}]
+
+
+stream = client.chat.completions.create(
+    model="Meta-Llama-3.3-70B-Instruct", messages=messages, tools=th.get_tools() + local_tools, stream=True  # Retrieve tools installed from Toolhouse
+)
+
+# Use the stream and save blocks
+stream_storage = ToolhouseStreamStorage()
+for block in stream:  # pylint: disable=E1133
+    print(block)
+    stream_storage.add(block)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(stream_storage)
+
+response = client.chat.completions.create(model="Meta-Llama-3.3-70B-Instruct", messages=messages, tools=th.get_tools() + local_tools)  # Retrieve tools installed from Toolhouse
+print(response.choices[0].message.content)
diff --git a/examples/sample_sambanova_openai_client.py b/examples/sample_sambanova_openai_client.py
new file mode 100644
index 0000000..83af7f6
--- /dev/null
+++ b/examples/sample_sambanova_openai_client.py
@@ -0,0 +1,44 @@
+"""SambaNova on OpenAI Client Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse  # Import the Toolhouse SDK
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/"
+)
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Scrape data from https://docs.sambanova.ai/cloud/docs/get-started/overview and tell me what SambaNova Cloud is."
+}]
+
+response = client.chat.completions.create(
+    model='Meta-Llama-3.3-70B-Instruct',
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools()
+)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(response)
+
+response = client.chat.completions.create(
+    model="Meta-Llama-3.3-70B-Instruct",
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools()
+    )
+print(response.choices[0].message.content)
diff --git a/examples/sample_sambanova_openai_client_stream.py b/examples/sample_sambanova_openai_client_stream.py
new file mode 100644
index 0000000..115bb34
--- /dev/null
+++ b/examples/sample_sambanova_openai_client_stream.py
@@ -0,0 +1,52 @@
+"""SambaNova on OpenAI Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse  # Import the Toolhouse SDK
+from toolhouse.models.Stream import ToolhouseStreamStorage
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/",
+)
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Generate code to calculate the Fibonacci sequence to 100. " "Execute it and give me the result"
+}]
+
+stream = client.chat.completions.create(
+    model='Meta-Llama-3.1-405B-Instruct',
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools(),
+    stream=True
+)
+
+# Use the stream and save blocks
+stream_storage = ToolhouseStreamStorage()
+for block in stream:  # pylint: disable=E1133
+    print(block)
+    stream_storage.add(block)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(stream_storage)
+
+response = client.chat.completions.create(
+    model="Meta-Llama-3.1-405B-Instruct",
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools()
+    )
+print(response.choices[0].message.content)