From 9bd7f64c72202cf3ea073106291e1768ef98839a Mon Sep 17 00:00:00 2001
From: Luis Felipe Salazar Ucros <40307832+luisfucros@users.noreply.github.com>
Date: Mon, 28 Apr 2025 10:05:55 -0500
Subject: [PATCH 1/5] add sambanova API Key

---
 examples/.env.example | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/examples/.env.example b/examples/.env.example
index 0399aa4..9dfc3dc 100644
--- a/examples/.env.example
+++ b/examples/.env.example
@@ -3,4 +3,5 @@
 TOOLHOUSE_API_KEY=
 OPENAI_API_KEY=
 ANTHROPIC_API_KEY=
-GROQCLOUD_API_KEY=
\ No newline at end of file
+GROQCLOUD_API_KEY=
+SAMBANOVA_API_KEY=

From 1c1ba2b8cd82d288d7444d8b2bbad6ede55b9423 Mon Sep 17 00:00:00 2001
From: Luis Felipe Salazar Ucros <40307832+luisfucros@users.noreply.github.com>
Date: Mon, 28 Apr 2025 10:07:51 -0500
Subject: [PATCH 2/5] add sambanova sample run local tools stream

---
 ...sample_run_local_tools_sambanova_stream.py | 65 +++++++++++++++++++
 1 file changed, 65 insertions(+)
 create mode 100644 examples/sample_run_local_tools_sambanova_stream.py

diff --git a/examples/sample_run_local_tools_sambanova_stream.py b/examples/sample_run_local_tools_sambanova_stream.py
new file mode 100644
index 0000000..82ca192
--- /dev/null
+++ b/examples/sample_run_local_tools_sambanova_stream.py
@@ -0,0 +1,65 @@
+"""SambaNova Sample"""
+
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse  # Import the Toolhouse SDK
+from toolhouse.models.Stream import ToolhouseStreamStorage  # Import the Toolhouse Stream Storage
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/",
+)
+
+local_tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "hello",
+            "description": "Returns a customized hello message from a given city.",
+            "parameters": {
+                "type": "object",
+                "properties": {"city": {"type": "string", "description": "The city where you are from"}},
+                "required": ["city"],
+            },
+        },
+    }
+]
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+th.set_metadata("id", "fabio")
+th.set_metadata("timezone", 5)
+
+
+@th.register_local_tool("hello")
+def hello_tool(city: str):
+    """Return a Hello message from a specific city."""
+    return f"Hello from {city}!!!"
+
+
+messages: List = [{"role": "user", "content": "Can I get a hello from Rome?"}]
+
+
+stream = client.chat.completions.create(
+    model="Meta-Llama-3.3-70B-Instruct", messages=messages, tools=th.get_tools() + local_tools, stream=True  # Retrieve tools installed from Toolhouse
+)
+
+# Use the stream and save blocks
+stream_storage = ToolhouseStreamStorage()
+for block in stream:  # pylint: disable=E1133
+    print(block)
+    stream_storage.add(block)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(stream_storage)
+
+response = client.chat.completions.create(model="Meta-Llama-3.3-70B-Instruct", messages=messages, tools=th.get_tools() + local_tools)  # Retrieve tools installed from Toolhouse
+print(response.choices[0].message.content)

From 8c72d9647209f203e7c3648d4590c1089d786d0f Mon Sep 17 00:00:00 2001
From: Luis Felipe Salazar Ucros <40307832+luisfucros@users.noreply.github.com>
Date: Mon, 28 Apr 2025 10:08:40 -0500
Subject: [PATCH 3/5] add sambanova sample run local tools

---
 examples/sample_run_local_tools_sambanova.py | 67 ++++++++++++++++++++
 1 file changed, 67 insertions(+)
 create mode 100644 examples/sample_run_local_tools_sambanova.py

diff --git a/examples/sample_run_local_tools_sambanova.py b/examples/sample_run_local_tools_sambanova.py
new file mode 100644
index 0000000..b244ed7
--- /dev/null
+++ b/examples/sample_run_local_tools_sambanova.py
@@ -0,0 +1,67 @@
+"""SambaNova Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+
+local_tools = [
+    {
+        'type': 'function',
+        'function': {
+            'name': 'hello',
+            'description': 'Returns a customized hello message from a given city.',
+            'parameters': {
+                'type': 'object',
+                'properties': {
+                    'city': {'type': 'string', 'description': 'The city where you are from'}
+                },
+                'required': ['city']
+            }}}]
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+th.set_metadata("id", "fabio")
+th.set_metadata("timezone", 5)
+
+
+@th.register_local_tool("hello")
+def hello_tool(city: str):
+    """Return a Hello message from a specific city."""
+    return f"Hello from {city}!!!"
+
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/",
+)
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Can I get a hello from Rome?"
+    }]
+
+response = client.chat.completions.create(
+    model='Meta-Llama-3.1-405B-Instruct',
+    messages=messages,
+    tools=th.get_tools() + local_tools  # Retrieve tools installed from Toolhouse
+)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(response)
+
+response = client.chat.completions.create(
+    model='Meta-Llama-3.1-405B-Instruct',
+    messages=messages,
+    tools=th.get_tools() + local_tools  # Retrieve tools installed from Toolhouse
+    )
+
+print(response.choices[0].message.content)

From e4a2be38ce803cd0bbbf2cd218e1c138ca0a6195 Mon Sep 17 00:00:00 2001
From: Luis Felipe Salazar Ucros <40307832+luisfucros@users.noreply.github.com>
Date: Mon, 28 Apr 2025 10:09:29 -0500
Subject: [PATCH 4/5] add sambanova_openai_client_stream sample

---
 .../sample_sambanova_openai_client_stream.py | 52 +++++++++++++++++++
 1 file changed, 52 insertions(+)
 create mode 100644 examples/sample_sambanova_openai_client_stream.py

diff --git a/examples/sample_sambanova_openai_client_stream.py b/examples/sample_sambanova_openai_client_stream.py
new file mode 100644
index 0000000..115bb34
--- /dev/null
+++ b/examples/sample_sambanova_openai_client_stream.py
@@ -0,0 +1,52 @@
+"""SambaNova on OpenAI Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse  # Import the Toolhouse SDK
+from toolhouse.models.Stream import ToolhouseStreamStorage
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/",
+)
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Generate code to calculate the Fibonacci sequence to 100. Execute it and give me the result"
+}]
+
+stream = client.chat.completions.create(
+    model='Meta-Llama-3.1-405B-Instruct',
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools(),
+    stream=True
+)
+
+# Use the stream and save blocks
+stream_storage = ToolhouseStreamStorage()
+for block in stream:  # pylint: disable=E1133
+    print(block)
+    stream_storage.add(block)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(stream_storage)
+
+response = client.chat.completions.create(
+    model="Meta-Llama-3.1-405B-Instruct",
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools()
+    )
+print(response.choices[0].message.content)

From 34517d37116a978c8d5b50f7cb3caba2d1fc1d4d Mon Sep 17 00:00:00 2001
From: Luis Felipe Salazar Ucros <40307832+luisfucros@users.noreply.github.com>
Date: Mon, 28 Apr 2025 10:10:07 -0500
Subject: [PATCH 5/5] add sambanova_openai_client sample

---
 examples/sample_sambanova_openai_client.py | 44 ++++++++++++++++++++
 1 file changed, 44 insertions(+)
 create mode 100644 examples/sample_sambanova_openai_client.py

diff --git a/examples/sample_sambanova_openai_client.py b/examples/sample_sambanova_openai_client.py
new file mode 100644
index 0000000..83af7f6
--- /dev/null
+++ b/examples/sample_sambanova_openai_client.py
@@ -0,0 +1,44 @@
+"""SambaNova on OpenAI Client Sample"""
+import os
+from typing import List
+from dotenv import load_dotenv
+from openai import OpenAI
+from toolhouse import Toolhouse  # Import the Toolhouse SDK
+
+# Make sure to set up the .env file according to the .env.example file.
+load_dotenv()
+
+TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
+SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    api_key=SAMBANOVA_API_KEY,
+    base_url="https://api.sambanova.ai/v1/"
+)
+
+# Initialize Toolhouse with the OpenAI provider
+th = Toolhouse(api_key=TH_API_KEY, provider="openai")
+
+messages: List = [{
+    "role": "user",
+    "content":
+        "Scrape data from https://docs.sambanova.ai/cloud/docs/get-started/overview and tell me what SambaNova Cloud is."
+}]
+
+response = client.chat.completions.create(
+    model='Meta-Llama-3.3-70B-Instruct',
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools()
+)
+
+# Run the tools using the Toolhouse client with the created message
+messages += th.run_tools(response)
+
+response = client.chat.completions.create(
+    model="Meta-Llama-3.3-70B-Instruct",
+    messages=messages,
+    # Retrieve tools installed from Toolhouse
+    tools=th.get_tools()
+    )
+print(response.choices[0].message.content)