From 044ba2a7346ca6cc7a3ff52b9edb4c0c32a8f0eb Mon Sep 17 00:00:00 2001
From: ParthSareen
Date: Mon, 18 Aug 2025 19:35:01 -0700
Subject: [PATCH 1/2] examples/gpt-oss: fix examples

---
 examples/gpt-oss-tools-stream.py | 46 +++++++++++++++++++++++++-------
 examples/gpt-oss-tools.py        | 23 +++++++++++++---
 2 files changed, 56 insertions(+), 13 deletions(-)

diff --git a/examples/gpt-oss-tools-stream.py b/examples/gpt-oss-tools-stream.py
index dee8c54..63eec55 100644
--- a/examples/gpt-oss-tools-stream.py
+++ b/examples/gpt-oss-tools-stream.py
@@ -1,7 +1,17 @@
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "gpt-oss",
+#   "ollama",
+#   "rich",
+# ]
+# ///
 import random
 from typing import Iterator
 
-from ollama import chat
+from rich import print
+
+from ollama import Client
 from ollama._types import ChatResponse
 
 
@@ -40,35 +50,53 @@ def get_weather_conditions(city: str) -> str:
 
 messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]
 
+client = Client(
+  # Ollama Turbo
+  # host="https://ollama.com", headers={'Authorization': (os.getenv('OLLAMA_API_KEY'))}
+)
 
-model = 'gpt-oss:20b'
+model = 'gpt-oss:120b'
 
 # gpt-oss can call tools while "thinking"
 # a loop is needed to call the tools and get the results
 
 final = True
 while True:
-  response_stream: Iterator[ChatResponse] = chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions], stream=True)
+  response_stream: Iterator[ChatResponse] = client.chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions], stream=True)
+  tool_calls = []
+  thinking = ''
+  content = ''
 
   for chunk in response_stream:
+    if chunk.message.tool_calls:
+      tool_calls.extend(chunk.message.tool_calls)
+
     if chunk.message.content:
       if not (chunk.message.thinking or chunk.message.thinking == '') and final:
-        print('\nFinal result: ')
+        print('\n\n' + '='*10)
+        print('Final result: ')
         final = False
       print(chunk.message.content, end='', flush=True)
+
     if chunk.message.thinking:
+      # accumulate thinking
+      thinking += chunk.message.thinking
       print(chunk.message.thinking, end='', flush=True)
+
+  if thinking != '' or content != '':
+    messages.append({'role': 'assistant', 'thinking': thinking, 'content': content, 'tool_calls': tool_calls})
 
   print()
 
-  if chunk.message.tool_calls:
-    for tool_call in chunk.message.tool_calls:
+  if tool_calls:
+    for tool_call in tool_calls:
       function_to_call = available_tools.get(tool_call.function.name)
       if function_to_call:
-        print('\nCalling tool: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
+        print('\nCalling tool:', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
         result = function_to_call(**tool_call.function.arguments)
         print('Tool result: ', result + '\n')
 
-        messages.append(chunk.message)
-        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
+        result_message = {'role': 'tool', 'content': result, 'tool_name': tool_call.function.name}
+        messages.append(result_message)
       else:
         print(f'Tool {tool_call.function.name} not found')
diff --git a/examples/gpt-oss-tools.py b/examples/gpt-oss-tools.py
index 2cc9bd4..c9c27f1 100644
--- a/examples/gpt-oss-tools.py
+++ b/examples/gpt-oss-tools.py
@@ -1,6 +1,17 @@
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+#   "gpt-oss",
+#   "ollama",
+#   "rich",
+# ]
+# ///
+import os
 import random
 
-from ollama import chat
+from rich import print
+
+from ollama import Client
 from ollama._types import ChatResponse
 
 
@@ -40,11 +51,15 @@ def get_weather_conditions(city: str) -> str:
 messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]
 
+client = Client(
+  # Ollama Turbo
+  # host="https://ollama.com", headers={'Authorization': (os.getenv('OLLAMA_API_KEY'))}
+)
 model = 'gpt-oss:20b'
 
 # gpt-oss can call tools while "thinking"
 # a loop is needed to call the tools and get the results
 while True:
-  response: ChatResponse = chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions])
+  response: ChatResponse = client.chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions])
 
   if response.message.content:
     print('Content: ')
@@ -53,14 +68,14 @@ def get_weather_conditions(city: str) -> str:
     print('Thinking: ')
     print(response.message.thinking + '\n')
 
+  messages.append(response.message)
+
   if response.message.tool_calls:
     for tool_call in response.message.tool_calls:
       function_to_call = available_tools.get(tool_call.function.name)
       if function_to_call:
         result = function_to_call(**tool_call.function.arguments)
         print('Result from tool call name: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments, 'result: ', result + '\n')
-
-        messages.append(response.message)
         messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
       else:
         print(f'Tool {tool_call.function.name} not found')

From b9ece9bfda9c85d96d1dc27e85239b28a87d4969 Mon Sep 17 00:00:00 2001
From: ParthSareen
Date: Mon, 18 Aug 2025 19:38:45 -0700
Subject: [PATCH 2/2] linter

---
 examples/gpt-oss-tools-stream.py | 3 +--
 examples/gpt-oss-tools.py        | 1 -
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/examples/gpt-oss-tools-stream.py b/examples/gpt-oss-tools-stream.py
index 63eec55..8d0a27f 100644
--- a/examples/gpt-oss-tools-stream.py
+++ b/examples/gpt-oss-tools-stream.py
@@ -71,7 +71,7 @@ def get_weather_conditions(city: str) -> str:
 
     if chunk.message.content:
       if not (chunk.message.thinking or chunk.message.thinking == '') and final:
-        print('\n\n' + '='*10)
+        print('\n\n' + '=' * 10)
         print('Final result: ')
         final = False
       print(chunk.message.content, end='', flush=True)
@@ -81,7 +81,6 @@ def get_weather_conditions(city: str) -> str:
       # accumulate thinking
       thinking += chunk.message.thinking
       print(chunk.message.thinking, end='', flush=True)
 
-
   if thinking != '' or content != '':
     messages.append({'role': 'assistant', 'thinking': thinking, 'content': content, 'tool_calls': tool_calls})
 
diff --git a/examples/gpt-oss-tools.py b/examples/gpt-oss-tools.py
index c9c27f1..b250227 100644
--- a/examples/gpt-oss-tools.py
+++ b/examples/gpt-oss-tools.py
@@ -6,7 +6,6 @@
 #   "rich",
 # ]
 # ///
-import os
 import random
 
 from rich import print
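
Reviewer note: below is a condensed sketch of roughly how examples/gpt-oss-tools.py reads once both patches apply, for anyone who wants the end state without replaying the hunks. The get_weather/get_weather_conditions bodies and the loop-exit check are simplified stand-ins (those parts sit outside the hunks above); the Client usage, the appended assistant message, and the {'role': 'tool', ..., 'tool_name': ...} result shape follow the diff itself.

# Sketch only: stand-in tool bodies and an added exit check; not the literal file contents.
import random

from ollama import Client


def get_weather(city: str) -> str:
  """Stand-in tool: returns a random temperature for the city."""
  return f'{random.randint(-5, 35)} degrees Celsius'


def get_weather_conditions(city: str) -> str:
  """Stand-in tool: returns random weather conditions for the city."""
  return random.choice(['sunny', 'cloudy', 'rainy', 'snowy'])


available_tools = {'get_weather': get_weather, 'get_weather_conditions': get_weather_conditions}

messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]

client = Client()  # for Ollama Turbo, pass host='https://ollama.com' and an Authorization header as in the patch
model = 'gpt-oss:20b'

while True:
  response = client.chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions])

  if response.message.thinking:
    print('Thinking: \n' + response.message.thinking)
  if response.message.content:
    print('Content: \n' + response.message.content)

  # keep the assistant turn (content, thinking, tool calls) in the history before answering its tools
  messages.append(response.message)

  if not response.message.tool_calls:
    break  # exit check added for the sketch; the example's own termination logic is outside the shown hunks

  for tool_call in response.message.tool_calls:
    function_to_call = available_tools.get(tool_call.function.name)
    if function_to_call:
      result = function_to_call(**tool_call.function.arguments)
      print('Result from tool call name: ', tool_call.function.name, 'result: ', result)
      messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
    else:
      print(f'Tool {tool_call.function.name} not found')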