Commit 9409f1a ("rfmt") — 1 parent: 67c5ea3.
File tree: 2 files changed, +94 additions, −84 deletions.

examples/tools/main.py

Lines changed: 70 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -1,85 +1,87 @@
11
import json
2-
import ollama
2+
import ollama
33
import asyncio
44

5+
56
# Simulates an API call to get flight times
# In a real application, this would fetch data from a live database or API
def get_flight_times(departure: str, arrival: str) -> str:
    """Look up a flight between two airports and return the result as JSON.

    Args:
        departure: Departure airport code (case-insensitive, e.g. 'NYC').
        arrival: Arrival airport code (case-insensitive, e.g. 'LAX').

    Returns:
        A JSON string with 'departure', 'arrival' and 'duration' fields for a
        known route, or '{"error": "Flight not found"}' for an unknown one.
    """
    flights = {
        'NYC-LAX': {'departure': '08:00 AM', 'arrival': '11:30 AM', 'duration': '5h 30m'},
        'LAX-NYC': {'departure': '02:00 PM', 'arrival': '10:30 PM', 'duration': '5h 30m'},
        'LHR-JFK': {'departure': '10:00 AM', 'arrival': '01:00 PM', 'duration': '8h 00m'},
        'JFK-LHR': {'departure': '09:00 PM', 'arrival': '09:00 AM', 'duration': '7h 00m'},
        'CDG-DXB': {'departure': '11:00 AM', 'arrival': '08:00 PM', 'duration': '6h 00m'},
        'DXB-CDG': {'departure': '03:00 AM', 'arrival': '07:30 AM', 'duration': '7h 30m'},
    }

    # Normalize to the uppercase 'DEP-ARR' key format used by the table above.
    key = f'{departure}-{arrival}'.upper()
    return json.dumps(flights.get(key, {'error': 'Flight not found'}))
1921

2022
async def run(model: str):
21-
client = ollama.AsyncClient()
22-
# Initialize conversation with a user query
23-
messages = [{"role": "user", "content": "What is the flight time from New York (NYC) to Los Angeles (LAX)?"}]
23+
client = ollama.AsyncClient()
24+
# Initialize conversation with a user query
25+
messages = [{'role': 'user', 'content': 'What is the flight time from New York (NYC) to Los Angeles (LAX)?'}]
2426

25-
# First API call: Send the query and function description to the model
26-
response = await client.chat(
27-
model=model,
28-
messages=messages,
29-
tools=[
30-
{
31-
"type": "function",
32-
"function": {
33-
"name": "get_flight_times",
34-
"description": "Get the flight times between two cities",
35-
"parameters": {
36-
"type": "object",
37-
"properties": {
38-
"departure": {
39-
"type": "string",
40-
"description": "The departure city (airport code)",
41-
},
42-
"arrival": {
43-
"type": "string",
44-
"description": "The arrival city (airport code)",
45-
},
46-
},
47-
"required": ["departure", "arrival"],
48-
},
49-
},
27+
# First API call: Send the query and function description to the model
28+
response = await client.chat(
29+
model=model,
30+
messages=messages,
31+
tools=[
32+
{
33+
'type': 'function',
34+
'function': {
35+
'name': 'get_flight_times',
36+
'description': 'Get the flight times between two cities',
37+
'parameters': {
38+
'type': 'object',
39+
'properties': {
40+
'departure': {
41+
'type': 'string',
42+
'description': 'The departure city (airport code)',
43+
},
44+
'arrival': {
45+
'type': 'string',
46+
'description': 'The arrival city (airport code)',
47+
},
5048
},
51-
],
52-
)
53-
54-
# Add the model's response to the conversation history
55-
messages.append(response["message"])
49+
'required': ['departure', 'arrival'],
50+
},
51+
},
52+
},
53+
],
54+
)
5655

57-
# Check if the model decided to use the provided function
58-
if not response["message"].get("tool_calls"):
59-
print("The model didn't use the function. Its response was:")
60-
print(response["message"]["content"])
61-
return
56+
# Add the model's response to the conversation history
57+
messages.append(response['message'])
6258

63-
# Process function calls made by the model
64-
if response["message"].get("tool_calls"):
65-
available_functions = {
66-
"get_flight_times": get_flight_times,
59+
# Check if the model decided to use the provided function
60+
if not response['message'].get('tool_calls'):
61+
print("The model didn't use the function. Its response was:")
62+
print(response['message']['content'])
63+
return
64+
65+
# Process function calls made by the model
66+
if response['message'].get('tool_calls'):
67+
available_functions = {
68+
'get_flight_times': get_flight_times,
69+
}
70+
for tool in response['message']['tool_calls']:
71+
function_to_call = available_functions[tool['function']['name']]
72+
function_response = function_to_call(tool['function']['arguments']['departure'], tool['function']['arguments']['arrival'])
73+
# Add function response to the conversation
74+
messages.append(
75+
{
76+
'role': 'tool',
77+
'content': function_response,
6778
}
68-
for tool in response["message"]["tool_calls"]:
69-
function_to_call = available_functions[tool["function"]["name"]]
70-
function_response = function_to_call(
71-
tool["function"]["arguments"]["departure"],
72-
tool["function"]["arguments"]["arrival"]
73-
)
74-
# Add function response to the conversation
75-
messages.append({
76-
"role": "tool",
77-
"content": function_response,
78-
})
79+
)
80+
81+
# Second API call: Get final response from the model
82+
final_response = await client.chat(model=model, messages=messages)
83+
print(final_response['message']['content'])
7984

80-
# Second API call: Get final response from the model
81-
final_response = await client.chat(model=model,messages=messages)
82-
print(final_response["message"]["content"])
8385
# Run the async function — guarded so importing this module has no side effects.
if __name__ == '__main__':
    asyncio.run(run('mistral'))

ollama/_types.py

Lines changed: 24 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -52,25 +52,28 @@ class GenerateResponse(BaseGenerateResponse):
5252
context: Sequence[int]
5353
'Tokenized history up to the point of the response.'
5454

55+
5556
class ToolCallFunction(TypedDict):
5657
"""
5758
Tool call function.
5859
"""
59-
60+
6061
name: str
6162
'Name of the function.'
62-
63+
6364
args: NotRequired[Mapping[str, Any]]
6465
'Arguments of the function.'
65-
66+
67+
6668
class ToolCall(TypedDict):
    """
    Model tool calls.
    """

    function: ToolCallFunction
    'Function to be called.'
7375

76+
7477
class Message(TypedDict):
7578
"""
7679
Chat message.
@@ -93,30 +96,35 @@ class Message(TypedDict):
9396
9497
Valid image formats depend on the model. See the model card for more information.
9598
"""
96-
99+
97100
tool_calls: NotRequired[Sequence[ToolCall]]
98101
"""
99102
Tools calls to be made by the model.
100103
"""
101104

105+
102106
class Property(TypedDict):
103-
type: str
104-
description: str
105-
enum: NotRequired[Sequence[str]] # `enum` is optional and can be a list of strings
107+
type: str
108+
description: str
109+
enum: NotRequired[Sequence[str]] # `enum` is optional and can be a list of strings
110+
106111

107112
class Parameters(TypedDict):
    """JSON-schema parameter block of a tool function."""

    type: str
    required: Sequence[str]
    properties: Mapping[str, Property]
116+
111117

112118
class ToolFunction(TypedDict):
    """Function description advertised to the model as a callable tool."""

    name: str
    description: str
    parameters: Parameters
122+
116123

117124
class Tool(TypedDict):
    """Top-level tool entry passed in a chat request's `tools` list."""

    type: str
    function: ToolFunction
127+
120128

121129
class ChatResponse(BaseGenerateResponse):
122130
"""

Commit comments: 0.