Skip to content

Commit e06503a

Browse files
authored
Merge pull request #43 from fumito-ito/fix/example-interface
fix example interfaces
2 parents faea6ff + 251fa3c commit e06503a

File tree

7 files changed

+40
-22
lines changed

7 files changed

+40
-22
lines changed

Example.swiftpm/Package.resolved

Lines changed: 12 additions & 12 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Example.swiftpm/Protocol/MessagesSubject.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ protocol MessageSendable {
3737
func createMessage(
3838
_ messages: [Message],
3939
model: Model,
40-
system: String?,
40+
system: [SystemPrompt],
4141
maxTokens: Int,
4242
metaData: MetaData?,
4343
stopSequence: [String]?,
@@ -53,7 +53,7 @@ protocol MessageStreamable {
5353
func streamMessage(
5454
_ messages: [Message],
5555
model: Model,
56-
system: String?,
56+
system: [SystemPrompt],
5757
maxTokens: Int,
5858
metaData: MetaData?,
5959
stopSequence: [String]?,

Example.swiftpm/ViewModel/MockViewModel.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -72,13 +72,13 @@ import FunctionCalling
7272
}
7373

7474
struct MockMessageStreamable: MessageStreamable {
75-
func streamMessage(_ messages: [AnthropicSwiftSDK.Message], model: AnthropicSwiftSDK.Model, system: String?, maxTokens: Int, metaData: AnthropicSwiftSDK.MetaData?, stopSequence: [String]?, temperature: Double?, topP: Double?, topK: Int?, toolContainer: ToolContainer?, toolChoice: ToolChoice) async throws -> AsyncThrowingStream<any AnthropicSwiftSDK.StreamingResponse, any Error> {
75+
func streamMessage(_ messages: [AnthropicSwiftSDK.Message], model: AnthropicSwiftSDK.Model, system: [AnthropicSwiftSDK.SystemPrompt], maxTokens: Int, metaData: AnthropicSwiftSDK.MetaData?, stopSequence: [String]?, temperature: Double?, topP: Double?, topK: Int?, toolContainer: (any FunctionCalling.ToolContainer)?, toolChoice: AnthropicSwiftSDK.ToolChoice) async throws -> AsyncThrowingStream<any AnthropicSwiftSDK.StreamingResponse, any Error> {
7676
fatalError()
7777
}
7878
}
7979

8080
struct MockMessagesSendable: MessageSendable {
81-
func createMessage(_ messages: [AnthropicSwiftSDK.Message], model: AnthropicSwiftSDK.Model, system: String?, maxTokens: Int, metaData: AnthropicSwiftSDK.MetaData?, stopSequence: [String]?, temperature: Double?, topP: Double?, topK: Int?, toolContainer: ToolContainer?, toolChoice: ToolChoice) async throws -> AnthropicSwiftSDK.MessagesResponse {
81+
func createMessage(_ messages: [Message], model: Model, system: [SystemPrompt], maxTokens: Int, metaData: MetaData?, stopSequence: [String]?, temperature: Double?, topP: Double?, topK: Int?, toolContainer: (any ToolContainer)?, toolChoice: ToolChoice) async throws -> MessagesResponse {
8282
fatalError()
8383
}
8484
}

Example.swiftpm/ViewModel/SendViewModel.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ import AnthropicSwiftSDK
4444
let result = try await messageHandler.createMessage(
4545
[message],
4646
model: model,
47-
system: nil,
47+
system: [],
4848
maxTokens: 1024,
4949
metaData: nil,
5050
stopSequence: nil,

Example.swiftpm/ViewModel/StreamViewModel.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ import AnthropicSwiftSDK
4444
let stream = try await self.messageHandler.streamMessage(
4545
[message],
4646
model: model,
47-
system: nil,
47+
system: [],
4848
maxTokens: 1024,
4949
metaData: nil,
5050
stopSequence: nil,

Sources/AnthropicSwiftSDK-Bedrock/Messages.swift

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
import Foundation
99
import AnthropicSwiftSDK
1010
import AWSBedrockRuntime
11+
import FunctionCalling
1112

1213
public struct Messages {
1314
/// Acceptable content type for response
@@ -33,6 +34,8 @@ public struct Messages {
3334
/// - temperature: The temperature parameter controls the randomness of the generated text. Default is `nil`.
3435
/// - topP: The nucleus sampling parameter. Default is `nil`.
3536
/// - topK: The top-k sampling parameter. Default is `nil`.
37+
/// - toolContainer: The tool provider for `tool_use`. Default is `nil`. This property is defined but not used for Bedrock.
38+
/// - toolChoice: The parameter for tool choice. Default is `.auto`. This property is defined but not used for Bedrock.
3639
/// - Returns: A `MessagesResponse` object representing the response from the Anthropic API.
3740
/// - Throws: An error if the request fails or if there's an issue decoding the response.
3841
public func createMessage(
@@ -44,7 +47,9 @@ public struct Messages {
4447
stopSequence: [String]? = nil,
4548
temperature: Double? = nil,
4649
topP: Double? = nil,
47-
topK: Int? = nil
50+
topK: Int? = nil,
51+
toolContainer: ToolContainer? = nil,
52+
toolChoice: ToolChoice = .auto
4853
) async throws -> MessagesResponse {
4954
// In the inference call, fill the body field with a JSON object that conforms the type call you want to make [Anthropic Claude Messages API](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html).
5055
let requestBody = MessagesRequest(
@@ -83,6 +88,8 @@ public struct Messages {
8388
/// - temperature: The temperature parameter controls the randomness of the generated text. Default is `nil`.
8489
/// - topP: The nucleus sampling parameter. Default is `nil`.
8590
/// - topK: The top-k sampling parameter. Default is `nil`.
91+
/// - toolContainer: The tool provider for `tool_use`. Default is `nil`. This property is defined but not used for Bedrock.
92+
/// - toolChoice: The parameter for tool choice. Default is `.auto`. This property is defined but not used for Bedrock.
8693
/// - Returns: An asynchronous throwing stream of `StreamingResponse` objects representing the streaming response from the Anthropic API.
8794
/// - Throws: An error if the request fails or if there's an issue parsing the streaming response.
8895
public func streamMessage(
@@ -94,7 +101,9 @@ public struct Messages {
94101
stopSequence: [String]? = nil,
95102
temperature: Double? = nil,
96103
topP: Double? = nil,
97-
topK: Int? = nil
104+
topK: Int? = nil,
105+
toolContainer: ToolContainer? = nil,
106+
toolChoice: ToolChoice = .auto
98107
) async throws -> AsyncThrowingStream<StreamingResponse, Error> {
99108
// In the inference call, fill the body field with a JSON object that conforms the type call you want to make [Anthropic Claude Messages API](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html ).
100109
let requestBody = MessagesRequest(

Sources/AnthropicSwiftSDK-VertexAI/Messages.swift

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
import Foundation
99
import AnthropicSwiftSDK
10+
import FunctionCalling
1011

1112
public struct Messages {
1213
let projectId: String
@@ -33,6 +34,8 @@ public struct Messages {
3334
/// - temperature: The temperature parameter controls the randomness of the generated text. Default is `nil`.
3435
/// - topP: The nucleus sampling parameter. Default is `nil`.
3536
/// - topK: The top-k sampling parameter. Default is `nil`.
37+
/// - toolContainer: The tool provider for `tool_use`. Default is `nil`. This property is defined but not used for VertexAI.
38+
/// - toolChoice: The parameter for tool choice. Default is `.auto`. This property is defined but not used for VertexAI.
3639
/// - Returns: A `MessagesResponse` object representing the response from the Anthropic API.
3740
/// - Throws: An error if the request fails or if there's an issue decoding the response.
3841
public func createMessage(
@@ -44,7 +47,9 @@ public struct Messages {
4447
stopSequence: [String]? = nil,
4548
temperature: Double? = nil,
4649
topP: Double? = nil,
47-
topK: Int? = nil
50+
topK: Int? = nil,
51+
toolContainer: ToolContainer? = nil,
52+
toolChoice: ToolChoice = .auto
4853
) async throws -> MessagesResponse {
4954
let modelName = try model.vertexAIModelName
5055
let client = VertexAIClient(projectId: projectId, accessToken: accessToken, region: region, modelName: modelName, session: session)
@@ -87,6 +92,8 @@ public struct Messages {
8792
/// - temperature: The temperature parameter controls the randomness of the generated text. Default is `nil`.
8893
/// - topP: The nucleus sampling parameter. Default is `nil`.
8994
/// - topK: The top-k sampling parameter. Default is `nil`.
95+
/// - toolContainer: The tool provider for `tool_use`. Default is `nil`. This property is defined but not used for VertexAI.
96+
/// - toolChoice: The parameter for tool choice. Default is `.auto`. This property is defined but not used for VertexAI.
9097
/// - Returns: An asynchronous throwing stream of `StreamingResponse` objects representing the streaming response from the Anthropic API.
9198
/// - Throws: An error if the request fails or if there's an issue parsing the streaming response.
9299
public func streamMessage(
@@ -98,7 +105,9 @@ public struct Messages {
98105
stopSequence: [String]? = nil,
99106
temperature: Double? = nil,
100107
topP: Double? = nil,
101-
topK: Int? = nil
108+
topK: Int? = nil,
109+
toolContainer: ToolContainer? = nil,
110+
toolChoice: ToolChoice = .auto
102111
) async throws -> AsyncThrowingStream<StreamingResponse, Error> {
103112
let modelName = try model.vertexAIModelName
104113
let client = VertexAIClient(projectId: projectId, accessToken: accessToken, region: region, modelName: modelName, session: session)

0 commit comments

Comments (0)