Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion examples/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@
TOOLHOUSE_API_KEY=
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GROQCLOUD_API_KEY=
GROQCLOUD_API_KEY=
SAMBANOVA_API_KEY=
67 changes: 67 additions & 0 deletions examples/sample_run_local_tools_sambanova.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
"""Sambanova Sample"""
import os
from typing import List
from dotenv import load_dotenv
from openai import OpenAI
from toolhouse import Toolhouse

# Make sure to set up the .env file according to the .env.example file.
load_dotenv()

TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")


# Tools that run locally, described in the OpenAI tool-calling format.
# FIX: in the JSON Schema used by the OpenAI tools API, `required` must be
# nested INSIDE `parameters`, alongside `properties`. The original placed it
# at the function level, where it is silently ignored and the model is never
# told that `city` is mandatory.
local_tools = [
    {
        "type": "function",
        "function": {
            "name": "hello",
            "description": "The user receives a customized hello message from a city and returns it to the user.",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {"type": "string", "description": "The city where you are from"}
                },
                "required": ["city"],
            },
        },
    }
]

# Initialize Toolhouse with the OpenAI provider, so tool definitions and
# tool-call results use the OpenAI message format SambaNova also speaks.
th = Toolhouse(api_key=TH_API_KEY, provider="openai")
# NOTE(review): metadata presumably personalizes tool execution (user id /
# timezone) — confirm the exact semantics in the Toolhouse docs.
th.set_metadata("id", "fabio")
th.set_metadata("timezone", 5)


@th.register_local_tool("hello")
def hello_tool(city: str):
    """Build the greeting returned by the local `hello` tool."""
    greeting = "Hello from {}!!!".format(city)
    return greeting


# SambaNova exposes an OpenAI-compatible endpoint, so the stock OpenAI
# client works once pointed at SambaNova's base URL.
client = OpenAI(api_key=SAMBANOVA_API_KEY, base_url="https://api.sambanova.ai/v1/")

MODEL = "Meta-Llama-3.1-405B-Instruct"

messages: List = [{"role": "user", "content": "Can I get a hello from Rome?"}]

# First round trip: the model sees the Toolhouse-installed tools plus the
# local ones, and may respond with tool calls.
response = client.chat.completions.create(
    model=MODEL,
    messages=messages,
    tools=th.get_tools() + local_tools,  # Retrieve tools installed from Toolhouse
)

# Execute any requested tools and append the resulting messages.
messages += th.run_tools(response)

# Second round trip: the model now has the tool results and can answer.
response = client.chat.completions.create(
    model=MODEL,
    messages=messages,
    tools=th.get_tools() + local_tools,
)

print(response.choices[0].message.content)
65 changes: 65 additions & 0 deletions examples/sample_run_local_tools_sambanova_stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
"""SambaNova Sample"""

import os
from typing import List
from dotenv import load_dotenv
from openai import OpenAI
from toolhouse import Toolhouse # Import the Toolhouse SDK
from toolhouse.models.Stream import ToolhouseStreamStorage # Import the Toolhouse Stream Storage

# Make sure to set up the .env file according to the .env.example file.
load_dotenv()

TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")

client = OpenAI(
api_key=SAMBANOVA_API_KEY,
base_url="https://api.sambanova.ai/v1/",
)

# Tools that run locally, described in the OpenAI tool-calling format.
# FIX: `required` belongs INSIDE the `parameters` JSON Schema object, next to
# `properties`. The original placed it at the function level, where the API
# ignores it and the model is never told `city` is mandatory.
local_tools = [
    {
        "type": "function",
        "function": {
            "name": "hello",
            "description": "The user receives a customized hello message from a city and returns it to the user.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string", "description": "The city where you are from"}},
                "required": ["city"],
            },
        },
    }
]

# Initialize Toolhouse with the OpenAI provider (OpenAI-style tool format,
# which SambaNova's endpoint also accepts).
th = Toolhouse(api_key=TH_API_KEY, provider="openai")
# NOTE(review): metadata presumably personalizes tool execution (user id /
# timezone) — confirm the exact semantics in the Toolhouse docs.
th.set_metadata("id", "fabio")
th.set_metadata("timezone", 5)


@th.register_local_tool("hello")
def hello_tool(city: str):
    """Produce the greeting the local `hello` tool returns for *city*."""
    return "Hello from " + city + "!!!"


MODEL = "Meta-Llama-3.3-70B-Instruct"

messages: List = [{"role": "user", "content": "Can I get a hello from Rome?"}]

# First request is streamed; tool-call deltas arrive chunk by chunk.
stream = client.chat.completions.create(
    model=MODEL,
    messages=messages,
    tools=th.get_tools() + local_tools,  # Retrieve tools installed from Toolhouse
    stream=True,
)

# Drain the stream, echoing every chunk and storing it so Toolhouse can
# reassemble the full (possibly tool-calling) response.
stream_storage = ToolhouseStreamStorage()
for chunk in stream:  # pylint: disable=E1133
    print(chunk)
    stream_storage.add(chunk)

# Execute any requested tools and append the resulting messages.
messages += th.run_tools(stream_storage)

# Non-streamed follow-up with the tool results in context.
response = client.chat.completions.create(
    model=MODEL,
    messages=messages,
    tools=th.get_tools() + local_tools,
)
print(response.choices[0].message.content)
44 changes: 44 additions & 0 deletions examples/sample_sambanova_openai_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""SambaNova on OpenAI Client Sample"""
import os
from typing import List
from dotenv import load_dotenv
from openai import OpenAI
from toolhouse import Toolhouse # Import the Toolhouse SDK

# Make sure to set up the .env file according to the .env.example file.
load_dotenv()

TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")

client = OpenAI(
api_key=SAMBANOVA_API_KEY,
base_url="https://api.sambanova.ai/v1/"
)

# Initialize Toolhouse with the OpenAI provider
th = Toolhouse(api_key=TH_API_KEY, provider="openai")

messages: List = [{
"role": "user",
"content":
"Scrape data from https://docs.sambanova.ai/cloud/docs/get-started/overview and tell me what SambaNova Cloud is."
}]

response = client.chat.completions.create(
model='Meta-Llama-3.3-70B-Instruct',
messages=messages,
# Retrieve tools installed from Toolhouse
tools=th.get_tools()
)

# Run the tools using the Toolhouse client with the created message
messages += th.run_tools(response)

response = client.chat.completions.create(
model="Meta-Llama-3.3-70B-Instruct",
messages=messages,
# Retrieve tools installed from Toolhouse
tools=th.get_tools()
)
print(response.choices[0].message.content)
52 changes: 52 additions & 0 deletions examples/sample_sambanova_openai_client_stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
"""SambaNova on OpenAI Sample"""
import os
from typing import List
from dotenv import load_dotenv
from openai import OpenAI
from toolhouse import Toolhouse # Import the Toolhouse SDK
from toolhouse.models.Stream import ToolhouseStreamStorage

# Make sure to set up the .env file according to the .env.example file.
load_dotenv()

TH_API_KEY = os.getenv("TOOLHOUSE_API_KEY")
SAMBANOVA_API_KEY = os.getenv("SAMBANOVA_API_KEY")

client = OpenAI(
api_key=SAMBANOVA_API_KEY,
base_url="https://api.sambanova.ai/v1/",
)

# Initialize Toolhouse with the OpenAI provider
th = Toolhouse(api_key=TH_API_KEY, provider="openai")

messages: List = [{
"role": "user",
"content":
"Generate code to calculate the Fibonacci sequence to 100." "Execute it and give me the result"
}]

stream = client.chat.completions.create(
model='Meta-Llama-3.1-405B-Instruct',
messages=messages,
# Retrieve tools installed from Toolhouse
tools=th.get_tools(),
stream=True
)

# Use the stream and save blocks
stream_storage = ToolhouseStreamStorage()
for block in stream: # pylint: disable=E1133
print(block)
stream_storage.add(block)

# Run the tools using the Toolhouse client with the created message
messages += th.run_tools(stream_storage)

response = client.chat.completions.create(
model="Meta-Llama-3.1-405B-Instruct",
messages=messages,
# Retrieve tools installed from Toolhouse
tools=th.get_tools()
)
print(response.choices[0].message.content)