
Commit 22f8e90

examples: add gpt-oss tools
1 parent fe91357 commit 22f8e90

File tree

3 files changed, +183 −0 lines changed


examples/README.md

Lines changed: 4 additions & 0 deletions
@@ -27,6 +27,10 @@ See [ollama/docs/api.md](https://github.com/ollama/ollama/blob/main/docs/api.md)
 - [async-tools.py](async-tools.py)
 - [multi-tool.py](multi-tool.py) - Using multiple tools, with thinking enabled
 
+#### Agent loop
+- [gpt-oss-tools.py](gpt-oss-tools.py) - Using tools with gpt-oss
+- [gpt-oss-tools-stream.py](gpt-oss-tools-stream.py) - Using tools with gpt-oss, with streaming enabled
+
 
 ### Multimodal with Images - Chat with a multimodal (image chat) model
 - [multimodal-chat.py](multimodal-chat.py)
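
Both new examples assume the gpt-oss:20b model is already available locally. If it is not, one way to fetch it from Python before running either script is shown below (a minimal sketch, not part of this commit, using the library's pull helper and assuming a local Ollama server on the default port; the CLI equivalent is `ollama pull gpt-oss:20b`):

from ollama import pull

# Download gpt-oss:20b from the Ollama registry if it is not already present.
# Assumes the local Ollama server is reachable on its default port.
pull('gpt-oss:20b')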

examples/gpt-oss-tools-stream.py

Lines changed: 93 additions & 0 deletions
@@ -0,0 +1,93 @@
from typing import Iterator

import random

from ollama import chat
from ollama._types import ChatResponse


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current temperature
  """
  temperatures = list(range(-10, 35))
  temp = random.choice(temperatures)

  return f"The temperature in {city} is {temp}°C"


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current weather conditions
  """
  conditions = ['sunny', 'cloudy', 'rainy', 'snowy', 'foggy']
  return random.choice(conditions)


available_tools = {
  'get_weather': get_weather,
  'get_weather_conditions': get_weather_conditions
}

messages = [
  {
    'role': 'user',
    'content': 'What is the weather like in London? What are the conditions in Toronto?'
  }
]


model = 'gpt-oss:20b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
final = True
while True:
  response_stream: Iterator[ChatResponse] = chat(
    model=model,
    messages=messages,
    tools=[get_weather, get_weather_conditions],
    stream=True
  )

  for chunk in response_stream:
    if chunk.message.content:
      # once thinking has finished, print a header before the final answer
      if not (chunk.message.thinking or chunk.message.thinking == "") and final:
        print("\nFinal result: ")
        final = False
      print(chunk.message.content, end='', flush=True)
    if chunk.message.thinking:
      print(chunk.message.thinking, end='', flush=True)

  print()

  # the last chunk of the stream carries any tool calls the model made
  if chunk.message.tool_calls:
    for tool_call in chunk.message.tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        print("\nCalling tool: ", tool_call.function.name, "with arguments: ", tool_call.function.arguments)
        result = function_to_call(**tool_call.function.arguments)
        print("Tool result: ", result + "\n")

        # append the assistant message (with its tool calls) and the tool result
        # so the model can use them on the next turn
        messages.append(chunk.message)
        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
      else:
        print(f"Tool {tool_call.function.name} not found")
  else:
    # no more tool calls, we can stop the loop
    break

examples/gpt-oss-tools.py

Lines changed: 86 additions & 0 deletions
@@ -0,0 +1,86 @@
import random

from ollama import chat
from ollama._types import ChatResponse


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current temperature
  """
  temperatures = list(range(-10, 35))
  temp = random.choice(temperatures)

  return f"The temperature in {city} is {temp}°C"


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current weather conditions
  """
  conditions = ['sunny', 'cloudy', 'rainy', 'snowy', 'foggy']
  return random.choice(conditions)


available_tools = {
  'get_weather': get_weather,
  'get_weather_conditions': get_weather_conditions
}

messages = [
  {
    'role': 'user',
    'content': 'What is the weather like in London? What are the conditions in Toronto?'
  }
]


model = 'gpt-oss:20b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
while True:
  response: ChatResponse = chat(
    model=model,
    messages=messages,
    tools=[get_weather, get_weather_conditions]
  )

  if response.message.content:
    print("Content: ")
    print(response.message.content + "\n")
  if response.message.thinking:
    print("Thinking: ")
    print(response.message.thinking + "\n")

  if response.message.tool_calls:
    for tool_call in response.message.tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        result = function_to_call(**tool_call.function.arguments)
        print("Result from tool call name: ", tool_call.function.name, "with arguments: ", tool_call.function.arguments, "result: ", result + "\n")

        # append the assistant message (with its tool calls) and the tool result
        # so the model can use them on the next turn
        messages.append(response.message)
        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
      else:
        print(f"Tool {tool_call.function.name} not found")
  else:
    # no more tool calls, we can stop the loop
    break
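
As a side note (not part of this commit), the same chat call can also go through an explicit client instance, which helps when the Ollama server is not on the default local host. A minimal sketch follows, with a stub tool so the snippet stands alone; the host value is an assumption and matches the default:

from ollama import Client


def get_weather(city: str) -> str:
  """Stub tool for illustration only."""
  return f"The temperature in {city} is 20°C"


# Explicit client instead of the module-level chat(); pass a different host
# if the server is not running locally.
client = Client(host='http://localhost:11434')

response = client.chat(
  model='gpt-oss:20b',
  messages=[{'role': 'user', 'content': 'What is the weather like in London?'}],
  tools=[get_weather]
)
# The model may answer directly or request a tool call, exactly as in the
# examples above; a real script would loop as gpt-oss-tools.py does.
print(response.message)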
