
Commit 518ce24

examples: add gpt-oss tools
1 parent fe91357 commit 518ce24

File tree

- examples/README.md
- examples/gpt-oss-tools-stream.py
- examples/gpt-oss-tools.py

3 files changed: +189 -0 lines changed

3 files changed

+189
-0
lines changed

examples/README.md

Lines changed: 4 additions & 0 deletions
@@ -27,6 +27,10 @@ See [ollama/docs/api.md](https://github.com/ollama/ollama/blob/main/docs/api.md)
 - [async-tools.py](async-tools.py)
 - [multi-tool.py](multi-tool.py) - Using multiple tools, with thinking enabled
 
+#### Agent loop
+- [gpt-oss-tools.py](gpt-oss-tools.py) - Using tools with gpt-oss
+- [gpt-oss-tools-stream.py](gpt-oss-tools-stream.py) - Using tools with gpt-oss, with streaming enabled
+
 
 ### Multimodal with Images - Chat with a multimodal (image chat) model
 - [multimodal-chat.py](multimodal-chat.py)
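Both new scripts implement the same agent loop: pass plain Python functions as tools, run whatever tool calls the model returns, append each result to the conversation as a role 'tool' message, and call chat again until a turn comes back with no tool calls. A minimal sketch of that loop, assuming a locally available gpt-oss:20b model and a lookup_weather stand-in tool that is not part of this commit (the full examples follow below):

from ollama import chat


def lookup_weather(city: str) -> str:
  """Return a canned weather string for a city (illustrative stand-in tool)."""
  return f'It is sunny in {city}'


tools = {'lookup_weather': lookup_weather}
messages = [{'role': 'user', 'content': 'What is the weather in Paris?'}]

while True:
  response = chat(model='gpt-oss:20b', messages=messages, tools=[lookup_weather])
  if not response.message.tool_calls:
    # The model answered without asking for a tool, so the loop is done
    print(response.message.content)
    break
  messages.append(response.message)
  for call in response.message.tool_calls:
    result = tools[call.function.name](**call.function.arguments)
    messages.append({'role': 'tool', 'content': result, 'tool_name': call.function.name})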

examples/gpt-oss-tools-stream.py

Lines changed: 96 additions & 0 deletions
@@ -0,0 +1,96 @@
import random
from typing import Iterator

from ollama import ChatResponse, chat


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city

  Args:
      city (str): The name of the city

  Returns:
      str: The current temperature
  """
  # Mock temperature data
  temperatures = list(range(-10, 35))

  temp = random.choice(temperatures)

  return f'The temperature in {city} is {temp}°C'


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city

  Args:
      city (str): The name of the city

  Returns:
      str: The current weather conditions
  """
  # Mock weather conditions
  conditions = ['sunny', 'cloudy', 'rainy', 'snowy', 'foggy']
  return random.choice(conditions)


# Map tool names reported by the model to the local Python functions
available_tools = {
  'get_weather': get_weather,
  'get_weather_conditions': get_weather_conditions,
}

# Example usage
messages = [
  {
    'role': 'user',
    'content': 'What is the weather like in London? What are the conditions in Toronto?',
  }
]

model = 'gpt-oss:20b'

# gpt-oss can call tools while "thinking", so an agent loop is needed:
# run the requested tools, feed the results back, and repeat until the
# model answers without asking for more tool calls.
final_header_printed = False
while True:
  response_stream: Iterator[ChatResponse] = chat(
    model=model,
    messages=messages,
    tools=[get_weather, get_weather_conditions],
    stream=True,
  )

  for chunk in response_stream:
    if chunk.message.content:
      # Print a header once, when the model starts streaming its final answer
      if not final_header_printed:
        print('\nFinal result: ')
        final_header_printed = True
      print(chunk.message.content, end='', flush=True)
    if chunk.message.thinking:
      print(chunk.message.thinking, end='', flush=True)

  print()

  if chunk.message.tool_calls:
    for tool_call in chunk.message.tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        print('\nCalling tool: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
        result = function_to_call(**tool_call.function.arguments)
        print('Tool result: ', result + '\n')

        # Add the assistant's tool call and the tool result to the conversation
        messages.append(chunk.message)
        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
      else:
        print(f'Tool {tool_call.function.name} not found')
  else:
    # No more tool calls, so the loop can stop
    break
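The streaming script uses the module-level chat helper, which talks to the default local Ollama server. To point the same loop at a different server, the call can go through a Client instance instead; a brief sketch reusing the messages and tool functions defined above (the host URL shown is just the default, for illustration):

from ollama import Client

client = Client(host='http://localhost:11434')  # default host, shown for illustration

# Drop-in replacement for the chat() call inside the loop above
response_stream = client.chat(
  model='gpt-oss:20b',
  messages=messages,
  tools=[get_weather, get_weather_conditions],
  stream=True,
)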

examples/gpt-oss-tools.py

Lines changed: 89 additions & 0 deletions
@@ -0,0 +1,89 @@
import random

from ollama import ChatResponse, chat


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city

  Args:
      city (str): The name of the city

  Returns:
      str: The current temperature
  """
  # Mock temperature data
  temperatures = list(range(-10, 35))

  temp = random.choice(temperatures)

  return f'The temperature in {city} is {temp}°C'


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city

  Args:
      city (str): The name of the city

  Returns:
      str: The current weather conditions
  """
  # Mock weather conditions
  conditions = ['sunny', 'cloudy', 'rainy', 'snowy', 'foggy']
  return random.choice(conditions)


# Map tool names reported by the model to the local Python functions
available_tools = {
  'get_weather': get_weather,
  'get_weather_conditions': get_weather_conditions,
}

# Example usage
messages = [
  {
    'role': 'user',
    'content': 'What is the weather like in London? What are the conditions in Toronto?',
  }
]

model = 'gpt-oss:20b'

# gpt-oss can call tools while "thinking", so an agent loop is needed:
# run the requested tools, feed the results back, and repeat until the
# model answers without asking for more tool calls.
while True:
  response: ChatResponse = chat(
    model=model,
    messages=messages,
    tools=[get_weather, get_weather_conditions],
  )

  if response.message.content:
    print('Content: ')
    print(response.message.content + '\n')
  if response.message.thinking:
    print('Thinking: ')
    print(response.message.thinking + '\n')

  if response.message.tool_calls:
    for tool_call in response.message.tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        result = function_to_call(**tool_call.function.arguments)
        print('Tool call: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments, 'returned: ', result + '\n')

        # Add the assistant's tool call and the tool result to the conversation
        messages.append(response.message)
        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
      else:
        print(f'Tool {tool_call.function.name} not found')
  else:
    # No more tool calls, so the loop can stop
    break
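Both examples let the SDK derive each tool's JSON schema from the function signature and docstring. A tool can also be declared explicitly as a dict; a sketch of what a hand-written definition for get_weather might look like, assuming the usual tools schema of type, function.name, function.description, and function.parameters (the field values here are paraphrased from the docstring above):

get_weather_tool = {
  'type': 'function',
  'function': {
    'name': 'get_weather',
    'description': 'Get the current temperature for a city',
    'parameters': {
      'type': 'object',
      'properties': {
        'city': {'type': 'string', 'description': 'The name of the city'},
      },
      'required': ['city'],
    },
  },
}

# Passing the dict instead of the callable yields the same tool_calls in the
# response; the matching Python function still has to be run by hand when a
# call comes back.
response = chat(model='gpt-oss:20b', messages=messages, tools=[get_weather_tool])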
