From 72f0bc4d9b3133e53d3a1bf770f2c22ba42c51dd Mon Sep 17 00:00:00 2001
From: Kevin Hermawan <84965338+kevinhermawan@users.noreply.github.com>
Date: Thu, 31 Oct 2024 05:49:50 +0700
Subject: [PATCH] feat: conforms to `Sendable`

---
 .github/workflows/code-quality.yml            |  6 ++---
 .github/workflows/deploy-docs.yml             |  2 +-
 Package.resolved                              |  4 ++--
 Package.swift                                 |  2 +-
 .../Playground.xcodeproj/project.pbxproj      |  4 ++--
 .../xcshareddata/swiftpm/Package.resolved     |  8 +++----
 .../Playground/ViewModels/AppViewModel.swift  |  3 ++-
 Sources/LLMChatOpenAI/ChatCompletion.swift    | 22 +++++++++----------
 .../LLMChatOpenAI/ChatCompletionChunk.swift   | 22 +++++++++----------
 Sources/LLMChatOpenAI/ChatMessage.swift       |  8 +++----
 Sources/LLMChatOpenAI/ChatOptions.swift       |  8 +++----
 Sources/LLMChatOpenAI/LLMChatOpenAI.swift     |  2 +-
 .../LLMChatOpenAI/LLMChatOpenAIError.swift    |  2 +-
 13 files changed, 47 insertions(+), 46 deletions(-)

diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml
index e11269c..1b874eb 100644
--- a/.github/workflows/code-quality.yml
+++ b/.github/workflows/code-quality.yml
@@ -10,16 +10,16 @@ on:
 
 jobs:
   test-ios:
-    runs-on: macos-14
+    runs-on: macos-15
 
     steps:
       - uses: actions/checkout@v4
 
      - name: Build and test
-        run: xcodebuild test -scheme LLMChatOpenAI -destination 'platform=iOS Simulator,name=iPhone 15 Pro'
+        run: xcodebuild test -scheme LLMChatOpenAI -destination 'platform=iOS Simulator,name=iPhone 16 Pro'
 
   test-macos:
-    runs-on: macos-14
+    runs-on: macos-15
 
     steps:
       - uses: actions/checkout@v4
diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml
index 1dfb177..a454ee3 100644
--- a/.github/workflows/deploy-docs.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -15,7 +15,7 @@ concurrency:
 
 jobs:
   deploy:
-    runs-on: macos-14
+    runs-on: macos-15
 
     environment:
       name: github-pages
diff --git a/Package.resolved b/Package.resolved
index c51a73a..66053cc 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -23,8 +23,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/kevinhermawan/swift-json-schema.git",
       "state" : {
-        "revision" : "48cade5fb09c6fdc86adcf9af7e43a2c7582a65a",
-        "version" : "1.0.3"
+        "revision" : "4a9284e44d8ef2bfc863734e9ff535909ae6b27a",
+        "version" : "2.0.1"
       }
     }
   ],
diff --git a/Package.swift b/Package.swift
index 087d9f6..2cd517c 100644
--- a/Package.swift
+++ b/Package.swift
@@ -12,7 +12,7 @@ let package = Package(
             targets: ["LLMChatOpenAI"]),
     ],
     dependencies: [
-        .package(url: "https://github.com/kevinhermawan/swift-json-schema.git", exact: "1.0.3"),
+        .package(url: "https://github.com/kevinhermawan/swift-json-schema.git", .upToNextMajor(from: "2.0.1")),
         .package(url: "https://github.com/apple/swift-docc-plugin.git", .upToNextMajor(from: "1.4.3"))
     ],
     targets: [
diff --git a/Playground/Playground.xcodeproj/project.pbxproj b/Playground/Playground.xcodeproj/project.pbxproj
index 6dee7b7..4c3163a 100644
--- a/Playground/Playground.xcodeproj/project.pbxproj
+++ b/Playground/Playground.xcodeproj/project.pbxproj
@@ -286,7 +286,7 @@
 				SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
 				SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
 				SWIFT_EMIT_LOC_STRINGS = YES;
-				SWIFT_VERSION = 5.0;
+				SWIFT_VERSION = 6.0;
 				TARGETED_DEVICE_FAMILY = "1,2";
 			};
 			name = Debug;
@@ -320,7 +320,7 @@
 				SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
 				SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
 				SWIFT_EMIT_LOC_STRINGS = YES;
-				SWIFT_VERSION = 5.0;
+				SWIFT_VERSION = 6.0;
 				TARGETED_DEVICE_FAMILY = "1,2";
 			};
 			name = Release;
diff --git a/Playground/Playground.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Playground/Playground.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
index 49e43fb..46cad00 100644
--- a/Playground/Playground.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
+++ b/Playground/Playground.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -1,5 +1,5 @@
 {
-  "originHash" : "7172a3f79f518d210e3e3d0402571927d2eec7848017ed81b4591457462765e0",
+  "originHash" : "7cf80a4b4902232e6d00899a52f8ffaa6e750ea08507cc8e810cf9b3200f2ca2",
   "pins" : [
     {
       "identity" : "swift-ai-model-retriever",
@@ -7,7 +7,7 @@
       "location" : "https://github.com/kevinhermawan/swift-ai-model-retriever.git",
       "state" : {
         "branch" : "main",
-        "revision" : "5d22906f1bedcb53452257c784ebffa72e9ad1cb"
+        "revision" : "b9cd7cd4a68969b4158a2a5a0bc59311028737da"
       }
     },
     {
@@ -15,8 +15,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/kevinhermawan/swift-json-schema.git",
       "state" : {
-        "revision" : "48cade5fb09c6fdc86adcf9af7e43a2c7582a65a",
-        "version" : "1.0.3"
+        "revision" : "4a9284e44d8ef2bfc863734e9ff535909ae6b27a",
+        "version" : "2.0.1"
       }
     }
   ],
diff --git a/Playground/Playground/ViewModels/AppViewModel.swift b/Playground/Playground/ViewModels/AppViewModel.swift
index 4dc87ea..07aa613 100644
--- a/Playground/Playground/ViewModels/AppViewModel.swift
+++ b/Playground/Playground/ViewModels/AppViewModel.swift
@@ -15,9 +15,10 @@ enum ServiceProvider: String, CaseIterable {
     case groq = "Groq"
 }
 
+@MainActor
 @Observable
 final class AppViewModel {
-    var stream: Bool = false
+    var stream: Bool = true
     var openaiAPIKey: String = ""
     var openRouterAPIKey: String = ""
     var groqAPIKey: String = ""
diff --git a/Sources/LLMChatOpenAI/ChatCompletion.swift b/Sources/LLMChatOpenAI/ChatCompletion.swift
index d962a7d..b55a3c7 100644
--- a/Sources/LLMChatOpenAI/ChatCompletion.swift
+++ b/Sources/LLMChatOpenAI/ChatCompletion.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// A struct that represents a chat completion response.
-public struct ChatCompletion: Decodable {
+public struct ChatCompletion: Decodable, Sendable {
     /// A unique identifier for the chat completion.
     public let id: String
 
@@ -35,7 +35,7 @@ public struct ChatCompletion: Decodable {
     /// The usage statistics for the completion request.
     public let usage: Usage?
 
-    public struct Choice: Decodable {
+    public struct Choice: Decodable, Sendable {
         /// The index of the choice in the list of choices.
         public let index: Int
 
@@ -49,7 +49,7 @@ public struct ChatCompletion: Decodable {
         /// Log probability information for the choice.
         public let logprobs: Logprobs?
 
-        public struct Message: Decodable {
+        public struct Message: Decodable, Sendable {
             /// The role of the author of this message.
             public let role: String
 
@@ -62,7 +62,7 @@ public struct ChatCompletion: Decodable {
             /// An array of ``ToolCall`` generated by the model.
             public let toolCalls: [ToolCall]?
 
-            public struct ToolCall: Codable {
+            public struct ToolCall: Decodable, Sendable {
                 /// The ID of the tool call.
                 public let id: String
 
@@ -72,7 +72,7 @@ public struct ChatCompletion: Decodable {
                 /// The function that the model called.
                 public let function: Function
 
-                public struct Function: Codable {
+                public struct Function: Codable, Sendable {
                     /// The name of the function to call.
                     public let name: String
 
@@ -91,7 +91,7 @@ public struct ChatCompletion: Decodable {
         }
 
         /// The reason the model stopped generating tokens.
-        public enum FinishReason: String, Decodable {
+        public enum FinishReason: String, Decodable, Sendable {
             /// The model reached a natural stop point or a provided stop sequence.
             case stop
 
@@ -105,14 +105,14 @@ public struct ChatCompletion: Decodable {
             case contentFilter = "content_filter"
         }
 
-        public struct Logprobs: Decodable {
+        public struct Logprobs: Decodable, Sendable {
             /// An array of message content tokens with log probability information.
             public let content: [TokenInfo]?
 
             /// An array of message refusal tokens with log probability information.
             public let refusal: [TokenInfo]?
 
-            public struct TokenInfo: Decodable {
+            public struct TokenInfo: Decodable, Sendable {
                 /// The token.
                 public let token: String
 
@@ -144,7 +144,7 @@ public struct ChatCompletion: Decodable {
         }
     }
 
-    public struct Usage: Decodable {
+    public struct Usage: Decodable, Sendable {
         /// Number of tokens in the generated completion.
         public let completionTokens: Int
 
@@ -160,7 +160,7 @@ public struct ChatCompletion: Decodable {
         /// Breakdown of tokens used in the prompt.
         public let promptTokensDetails: PromptTokensDetails?
 
-        public struct CompletionTokensDetails: Decodable {
+        public struct CompletionTokensDetails: Decodable, Sendable {
            /// Tokens generated by the model for reasoning.
            public let reasoningTokens: Int
 
@@ -169,7 +169,7 @@ public struct ChatCompletion: Decodable {
            }
        }
 
-        public struct PromptTokensDetails: Decodable {
+        public struct PromptTokensDetails: Decodable, Sendable {
            /// Cached tokens present in the prompt.
            public let cachedTokens: Int
 
diff --git a/Sources/LLMChatOpenAI/ChatCompletionChunk.swift b/Sources/LLMChatOpenAI/ChatCompletionChunk.swift
index 92726b6..598613e 100644
--- a/Sources/LLMChatOpenAI/ChatCompletionChunk.swift
+++ b/Sources/LLMChatOpenAI/ChatCompletionChunk.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// A struct that represents a streamed chunk of a chat completion response.
-public struct ChatCompletionChunk: Decodable {
+public struct ChatCompletionChunk: Decodable, Sendable {
     /// A unique identifier for the chat completion. Each chunk has the same identifier.
     public let id: String
 
@@ -35,7 +35,7 @@ public struct ChatCompletionChunk: Decodable {
     /// Usage statistics for the completion request.
     public let usage: Usage?
 
-    public struct Choice: Decodable {
+    public struct Choice: Decodable, Sendable {
         /// The index of the choice in the list of choices.
         public let index: Int
 
@@ -49,7 +49,7 @@ public struct ChatCompletionChunk: Decodable {
         /// Log probability information for the choice.
         public let logprobs: Logprobs?
 
-        public struct Delta: Decodable {
+        public struct Delta: Decodable, Sendable {
             /// The role of the author of this message.
             public let role: String?
 
@@ -62,7 +62,7 @@ public struct ChatCompletionChunk: Decodable {
             /// An array of ``ToolCall`` generated by the model.
             public var toolCalls: [ToolCall]?
 
-            public struct ToolCall: Decodable {
+            public struct ToolCall: Decodable, Sendable {
                 /// The ID of the tool call.
                 public let id: String?
 
@@ -72,7 +72,7 @@ public struct ChatCompletionChunk: Decodable {
                 /// The function that the model called.
                 public var function: Function?
 
-                public struct Function: Decodable {
+                public struct Function: Decodable, Sendable {
                     /// The name of the function to call.
                     public var name: String?
 
@@ -90,7 +90,7 @@ public struct ChatCompletionChunk: Decodable {
         }
 
         /// The reason the model stopped generating tokens.
-        public enum FinishReason: String, Decodable {
+        public enum FinishReason: String, Decodable, Sendable {
             /// The model reached a natural stop point or a provided stop sequence.
             case stop
 
@@ -104,14 +104,14 @@ public struct ChatCompletionChunk: Decodable {
             case contentFilter = "content_filter"
         }
 
-        public struct Logprobs: Decodable {
+        public struct Logprobs: Decodable, Sendable {
             /// An array of message content tokens with log probability information.
             public let content: [TokenInfo]?
 
             /// An array of message refusal tokens with log probability information.
             public let refusal: [TokenInfo]?
 
-            public struct TokenInfo: Decodable {
+            public struct TokenInfo: Decodable, Sendable {
                 /// The token.
                 public let token: String
 
@@ -136,7 +136,7 @@ public struct ChatCompletionChunk: Decodable {
         }
     }
 
-    public struct Usage: Decodable {
+    public struct Usage: Decodable, Sendable {
         /// Number of tokens in the generated completion.
         public let completionTokens: Int?
 
@@ -152,7 +152,7 @@ public struct ChatCompletionChunk: Decodable {
         /// Breakdown of tokens used in the prompt.
         public let promptTokensDetails: PromptTokensDetails?
 
-        public struct CompletionTokensDetails: Decodable {
+        public struct CompletionTokensDetails: Decodable, Sendable {
            /// Tokens generated by the model for reasoning.
            public let reasoningTokens: Int
 
@@ -161,7 +161,7 @@ public struct ChatCompletionChunk: Decodable {
            }
        }
 
-        public struct PromptTokensDetails: Decodable {
+        public struct PromptTokensDetails: Decodable, Sendable {
            /// Cached tokens present in the prompt.
            public let cachedTokens: Int
 
diff --git a/Sources/LLMChatOpenAI/ChatMessage.swift b/Sources/LLMChatOpenAI/ChatMessage.swift
index 78b195e..3f2e73a 100644
--- a/Sources/LLMChatOpenAI/ChatMessage.swift
+++ b/Sources/LLMChatOpenAI/ChatMessage.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// A struct that represents a message in a chat conversation.
-public struct ChatMessage: Encodable {
+public struct ChatMessage: Encodable, Sendable {
     /// The role of the message's author.
     public let role: Role
 
@@ -19,20 +19,20 @@ public struct ChatMessage: Encodable {
     /// Provides the model information to differentiate between participants of the same role.
     public let name: String?
 
-    public enum Role: String, Codable {
+    public enum Role: String, Encodable, Sendable {
         case system
         case user
         case assistant
     }
 
-    public enum Content: Encodable {
+    public enum Content: Encodable, Sendable {
         /// Text content of the message.
         case text(String)
 
         /// Image content of the message.
         case image(String, detail: ImageDetail = .auto)
 
-        public enum ImageDetail: String, Encodable, CaseIterable {
+        public enum ImageDetail: String, Encodable, Sendable, CaseIterable {
            /// High detail mode. The model first sees the low res image (using 85 tokens) and then creates detailed crops using 170 tokens for each 512px x 512px tile.
            case high
 
diff --git a/Sources/LLMChatOpenAI/ChatOptions.swift b/Sources/LLMChatOpenAI/ChatOptions.swift
index 09c9168..b5390d0 100644
--- a/Sources/LLMChatOpenAI/ChatOptions.swift
+++ b/Sources/LLMChatOpenAI/ChatOptions.swift
@@ -9,7 +9,7 @@ import Foundation
 import JSONSchema
 
 /// A struct that represents the options of a chat completion request.
-public struct ChatOptions: Encodable {
+public struct ChatOptions: Encodable, Sendable {
     /// Number between -2.0 and 2.0.
     /// Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
     public let frequencyPenalty: Double?
@@ -118,7 +118,7 @@ public struct ChatOptions: Encodable {
         self.user = user
     }
 
-    public struct ResponseFormat: Encodable {
+    public struct ResponseFormat: Encodable, Sendable {
         /// The type of response format being defined.
         public let type: ResponseType
 
@@ -145,13 +145,13 @@ public struct ChatOptions: Encodable {
             case jsonSchema = "json_schema"
         }
 
-        public enum ResponseType: String, Codable, CaseIterable {
+        public enum ResponseType: String, Codable, Sendable, CaseIterable {
             case text
             case jsonObject = "json_object"
             case jsonSchema = "json_schema"
         }
 
-        public struct Schema: Encodable {
+        public struct Schema: Encodable, Sendable {
             /// The name of the response format.
             /// Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
             public let name: String
diff --git a/Sources/LLMChatOpenAI/LLMChatOpenAI.swift b/Sources/LLMChatOpenAI/LLMChatOpenAI.swift
index fa9479f..8cda3dd 100644
--- a/Sources/LLMChatOpenAI/LLMChatOpenAI.swift
+++ b/Sources/LLMChatOpenAI/LLMChatOpenAI.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// A struct that facilitates interactions with OpenAI and OpenAI-compatible chat completion APIs.
-public struct LLMChatOpenAI {
+public struct LLMChatOpenAI: Sendable {
     private let apiKey: String
     private let endpoint: URL
     private var headers: [String: String]? = nil
diff --git a/Sources/LLMChatOpenAI/LLMChatOpenAIError.swift b/Sources/LLMChatOpenAI/LLMChatOpenAIError.swift
index 7d0925c..ae6740f 100644
--- a/Sources/LLMChatOpenAI/LLMChatOpenAIError.swift
+++ b/Sources/LLMChatOpenAI/LLMChatOpenAIError.swift
@@ -8,7 +8,7 @@
 import Foundation
 
 /// An enum that represents errors from the chat completion request.
-public enum LLMChatOpenAIError: LocalizedError {
+public enum LLMChatOpenAIError: LocalizedError, Sendable {
     /// A case that represents a server-side error response.
     ///
     /// - Parameter message: The error message from the server.
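
Usage note: the `Sendable` conformances above are what let the library's request and response types cross Swift 6 strict-concurrency boundaries without diagnostics, mirroring the `@MainActor` annotation added to the Playground's `AppViewModel`. The sketch below illustrates the kind of caller this enables; it is not part of the patch, and it assumes an `LLMChatOpenAI(apiKey:)` initializer, a `stream(model:messages:)` method that yields `ChatCompletionChunk` values, and a `String`-based `ChatMessage` initializer, so check the package's README for the exact API. The `ChatViewModel` name and the model identifier are illustrative only.

import Observation
import LLMChatOpenAI

@MainActor
@Observable
final class ChatViewModel {
    var response = ""

    // `LLMChatOpenAI` is a Sendable struct, so a main-actor type can store it
    // and call it from concurrent contexts without data-race warnings.
    private let chat = LLMChatOpenAI(apiKey: "<YOUR_OPENAI_API_KEY>")

    func ask(_ prompt: String) {
        // Assumed convenience initializer that takes a plain String for content.
        let messages = [
            ChatMessage(role: .system, content: "You are a helpful assistant."),
            ChatMessage(role: .user, content: prompt)
        ]

        Task {
            // Each `ChatCompletionChunk` is Sendable, so chunks produced while
            // streaming can be appended directly to main-actor UI state.
            for try await chunk in chat.stream(model: "gpt-4o", messages: messages) {
                response += chunk.choices.first?.delta.content ?? ""
            }
        }
    }
}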