Add render as markdown param, bump ChatGPTSwift to 2.3.1
alfianlosari committed Jun 17, 2024
1 parent 46446b2 commit 0b923c3
Showing 6 changed files with 74 additions and 55 deletions.
Package.resolved (6 changes: 3 additions & 3 deletions)
@@ -1,5 +1,5 @@
{
"originHash" : "51ed7ea331aea83343645d6dc32e6073d4a0478df108efc7368bb1601074bbbf",
"originHash" : "fb8df18310c7b72cbada89a0e11f5d021f0f6aac466a93c6a355317cef9a2a75",
"pins" : [
{
"identity" : "async-http-client",
@@ -15,8 +15,8 @@
"kind" : "remoteSourceControl",
"location" : "https://github.com/alfianlosari/ChatGPTSwift.git",
"state" : {
"revision" : "eaaec98632c3a722695768c80335951d3a3b6c6a",
"version" : "2.2.5"
"revision" : "f8b052c4e1507857cdf1b698eef81a14c20977c0",
"version" : "2.3.1"
}
},
{
Package.swift (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@ let package = Package(
targets: ["ChatGPTUI"]),
],
dependencies: [
.package(url: "https://github.com/alfianlosari/ChatGPTSwift.git", from: "2.2.5"),
.package(url: "https://github.com/alfianlosari/ChatGPTSwift.git", from: "2.3.1"),
.package(url: "https://github.com/apple/swift-markdown.git", from: "0.3.0"),
.package(url: "https://github.com/alfianlosari/HighlighterSwift.git", from: "1.0.0"),
.package(url: "https://github.com/alfianlosari/SiriWaveView.git", from: "1.1.0")
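Downstream apps pick up ChatGPTSwift 2.3.1 transitively through ChatGPTUI. A manifest sketch for a hypothetical consumer (the ChatGPTUI repository URL, branch, platform settings, and target names here are assumptions, not part of this commit):

// swift-tools-version: 5.9
// Hypothetical consumer manifest (sketch); URL, branch, and platforms are assumed.
import PackageDescription

let package = Package(
    name: "MyChatApp",
    platforms: [.iOS(.v17), .macOS(.v14)],
    dependencies: [
        // Depending on ChatGPTUI resolves ChatGPTSwift 2.3.1 transitively.
        .package(url: "https://github.com/alfianlosari/ChatGPTUI.git", branch: "main")
    ],
    targets: [
        .target(
            name: "MyChatAppKit",
            dependencies: [.product(name: "ChatGPTUI", package: "ChatGPTUI")]
        )
    ]
)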
Sources/ChatGPTUI/ViewModels/TextChatViewModel.swift (94 changes: 54 additions & 40 deletions)
@@ -19,20 +19,22 @@ open class TextChatViewModel<CustomContent: View> {
public var senderImage: String?
public var botImage: String?
public var useStreaming = true
public var renderAsMarkdown = true

public let api: ChatGPTAPI
public var model: ChatGPTModel
public var systemText: String
public var temperature: Double

public init(messages: [MessageRow<CustomContent>] = [], senderImage: String? = nil, botImage: String? = nil, useStreaming: Bool = true, model: ChatGPTModel = .gpt_hyphen_3_period_5_hyphen_turbo, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, apiKey: String) {
public init(messages: [MessageRow<CustomContent>] = [], senderImage: String? = nil, botImage: String? = nil, useStreaming: Bool = true, model: ChatGPTModel = .gpt_hyphen_3_period_5_hyphen_turbo, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) {
self.messages = messages
self.senderImage = senderImage
self.botImage = botImage
self.useStreaming = useStreaming
self.model = model
self.api = ChatGPTAPI(apiKey: apiKey)
self.systemText = systemText
self.renderAsMarkdown = renderAsMarkdown
self.temperature = temperature
}

@@ -76,9 +78,13 @@
var streamText = ""
do {
let parsingTask = ResponseParsingTask()
let attributedSend = await parsingTask.parse(text: text)
try Task.checkCancellation()
messageRow.send = .attributed(attributedSend)
if renderAsMarkdown {
let attributedSend = await parsingTask.parse(text: text)
try Task.checkCancellation()
messageRow.send = .attributed(attributedSend)
} else {
messageRow.send = .rawText(text)
}

self.messages.append(messageRow)
let parserThresholdTextCount = 64
@@ -88,42 +94,48 @@
let stream = try await api.sendMessageStream(text: text, model: model, systemText: systemText, temperature: temperature)
for try await text in stream {
streamText += text
currentTextCount += text.count

if currentTextCount >= parserThresholdTextCount || text.contains("```") {
currentOutput = await parsingTask.parse(text: streamText)
try Task.checkCancellation()
currentTextCount = 0
}
if renderAsMarkdown {
currentTextCount += text.count

if currentTextCount >= parserThresholdTextCount || text.contains("```") {
currentOutput = await parsingTask.parse(text: streamText)
try Task.checkCancellation()
currentTextCount = 0
}

if let currentOutput = currentOutput, !currentOutput.results.isEmpty {
let suffixText = streamText.trimmingPrefix(currentOutput.string)
var results = currentOutput.results
let lastResult = results[results.count - 1]
var lastAttrString = lastResult.attributedString
if lastResult.isCodeBlock {
#if os(macOS)
lastAttrString.append(AttributedString(String(suffixText), attributes: .init([.font: NSFont.systemFont(ofSize: 12).apply(newTraits: .monoSpace), .foregroundColor: NSColor.white])))
#else
lastAttrString.append(AttributedString(String(suffixText), attributes: .init([.font: UIFont.systemFont(ofSize: 12).apply(newTraits: .traitMonoSpace), .foregroundColor: UIColor.white])))
#endif

if let currentOutput = currentOutput, !currentOutput.results.isEmpty {
let suffixText = streamText.trimmingPrefix(currentOutput.string)
var results = currentOutput.results
let lastResult = results[results.count - 1]
var lastAttrString = lastResult.attributedString
if lastResult.isCodeBlock {
#if os(macOS)
lastAttrString.append(AttributedString(String(suffixText), attributes: .init([.font: NSFont.systemFont(ofSize: 12).apply(newTraits: .monoSpace), .foregroundColor: NSColor.white])))
#else
lastAttrString.append(AttributedString(String(suffixText), attributes: .init([.font: UIFont.systemFont(ofSize: 12).apply(newTraits: .traitMonoSpace), .foregroundColor: UIColor.white])))
#endif

} else {
lastAttrString.append(AttributedString(String(suffixText)))
}
results[results.count - 1] = ParserResult(attributedString: lastAttrString, isCodeBlock: lastResult.isCodeBlock, codeBlockLanguage: lastResult.codeBlockLanguage)
messageRow.response = .attributed(.init(string: streamText, results: results))
} else {
lastAttrString.append(AttributedString(String(suffixText)))
messageRow.response = .attributed(.init(string: streamText, results: [
ParserResult(attributedString: AttributedString(stringLiteral: streamText), isCodeBlock: false, codeBlockLanguage: nil)
]))
}
results[results.count - 1] = ParserResult(attributedString: lastAttrString, isCodeBlock: lastResult.isCodeBlock, codeBlockLanguage: lastResult.codeBlockLanguage)
messageRow.response = .attributed(.init(string: streamText, results: results))

} else {
messageRow.response = .attributed(.init(string: streamText, results: [
ParserResult(attributedString: AttributedString(stringLiteral: streamText), isCodeBlock: false, codeBlockLanguage: nil)
]))
messageRow.response = .rawText(streamText)
}

self.messages[self.messages.count - 1] = messageRow
if let currentString = currentOutput?.string, currentString != streamText {
let output = await parsingTask.parse(text: streamText)
try Task.checkCancellation()
messageRow.response = .attributed(output)
if renderAsMarkdown {
if let currentString = currentOutput?.string, currentString != streamText {
let output = await parsingTask.parse(text: streamText)
try Task.checkCancellation()
messageRow.response = .attributed(output)
}
}
}
} catch is CancellationError {
@@ -158,12 +170,14 @@
let responseText = try await api.sendMessage(text: text, model: model, systemText: systemText, temperature: temperature)
try Task.checkCancellation()

let parsingTask = ResponseParsingTask()
let output = await parsingTask.parse(text: responseText)
try Task.checkCancellation()

messageRow.response = .attributed(output)

if renderAsMarkdown {
let parsingTask = ResponseParsingTask()
let output = await parsingTask.parse(text: responseText)
try Task.checkCancellation()
messageRow.response = .attributed(output)
} else {
messageRow.response = .rawText(responseText)
}
} catch {
messageRow.responseError = error.localizedDescription
}
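Usage note: the new renderAsMarkdown flag defaults to true, so existing callers keep the parsed, attributed output; passing false routes both the sent prompt and the streamed response through the .rawText path above and skips ResponseParsingTask. A minimal construction sketch (the API key value and function name are placeholders, and the @MainActor annotation is an assumption about the class's isolation):

import SwiftUI
import ChatGPTUI

// Sketch: a text chat view model that keeps responses as raw, unparsed text.
// All arguments other than renderAsMarkdown and apiKey use the defaults shown above.
@MainActor
func makePlainTextChatVM(apiKey: String) -> TextChatViewModel<Text> {
    TextChatViewModel<Text>(
        useStreaming: true,
        model: .gpt_hyphen_3_period_5_hyphen_turbo,
        systemText: "You're a helpful assistant",
        temperature: 0.6,
        renderAsMarkdown: false,   // new in this commit; defaults to true
        apiKey: apiKey             // placeholder: supply your OpenAI API key
    )
}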
Sources/ChatGPTUI/ViewModels/VoiceChatViewModel.swift (17 changes: 11 additions & 6 deletions)
@@ -13,6 +13,7 @@ open class VoiceChatViewModel<CustomContent: View>: NSObject, AVAudioRecorderDel
public var model: ChatGPTModel
public var systemText: String
public var temperature: Double
public var renderAsMarkdown = true

public var state: VoiceChatState<CustomContent> = .idle(nil) {
didSet {
@@ -43,11 +44,12 @@
.first!.appendingPathComponent("recording.m4a")
}

public init(voiceType: VoiceType = .alloy, model: ChatGPTModel = .gpt_hyphen_4o, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, apiKey: String) {
public init(voiceType: VoiceType = .alloy, model: ChatGPTModel = .gpt_hyphen_4o, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) {
self.selectedVoice = voiceType
self.model = model
self.systemText = systemText
self.temperature = temperature
self.renderAsMarkdown = renderAsMarkdown
self.api = ChatGPTAPI(apiKey: apiKey)
super.init()
#if !os(macOS)
@@ -134,15 +136,18 @@
let response = try await api.sendMessage(text: prompt, model: model, systemText: systemText, temperature: temperature)
try Task.checkCancellation()

let parsingTask = ResponseParsingTask()
let output = await parsingTask.parse(text: response)
try Task.checkCancellation()

let data = try await api.generateSpeechFrom(input: response, voice:
.init(rawValue: selectedVoice.rawValue) ?? .alloy)
try Task.checkCancellation()

try self.playAudio(data: data, response: .attributed(output))
if self.renderAsMarkdown {
let parsingTask = ResponseParsingTask()
let output = await parsingTask.parse(text: response)
try Task.checkCancellation()
try self.playAudio(data: data, response: .attributed(output))
} else {
try self.playAudio(data: data, response: .rawText(response))
}
} catch {
if Task.isCancelled { return }
state = .error(error)
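For the custom-content initializer path, the voice view model can be configured directly and injected via VoiceChatView(customContentVM:). A sketch assuming the Text specialization, with a placeholder API key and illustrative type name:

import SwiftUI
import ChatGPTUI

// Sketch: build the voice view model yourself so renderAsMarkdown (and other
// settings) can come from app configuration, then inject it into the view.
struct ConfiguredVoiceChatScreen: View {
    @State var vm = VoiceChatViewModel<Text>(
        voiceType: .alloy,
        model: .gpt_hyphen_4o,
        renderAsMarkdown: false,   // skip ResponseParsingTask; play and show raw text
        apiKey: "<YOUR_OPENAI_API_KEY>"
    )

    var body: some View {
        VoiceChatView(customContentVM: vm)
    }
}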
Sources/ChatGPTUI/Views/TextChatView.swift (4 changes: 2 additions & 2 deletions)
@@ -9,8 +9,8 @@ public struct TextChatView<CustomContent: View>: View {
@State var vm: TextChatViewModel<CustomContent>
@FocusState var isTextFieldFocused: Bool

public init(senderImage: String? = nil, botImage: String? = nil, useStreaming: Bool = true, model: ChatGPTModel = .gpt_hyphen_3_period_5_hyphen_turbo, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, apiKey: String) where CustomContent == Text {
self.vm = .init(senderImage: senderImage, botImage: botImage, useStreaming: useStreaming, model: model, systemText: systemText, temperature: temperature, apiKey: apiKey)
public init(senderImage: String? = nil, botImage: String? = nil, useStreaming: Bool = true, model: ChatGPTModel = .gpt_hyphen_3_period_5_hyphen_turbo, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) where CustomContent == Text {
self.vm = .init(senderImage: senderImage, botImage: botImage, useStreaming: useStreaming, model: model, systemText: systemText, temperature: temperature, renderAsMarkdown: renderAsMarkdown, apiKey: apiKey)
}

public init(customContentVM: TextChatViewModel<CustomContent>) {
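With the convenience initializer above, disabling markdown rendering is a single extra argument. A usage sketch (the API key value and type name are placeholders; all other parameters keep their defaults):

import SwiftUI
import ChatGPTUI

// Sketch: drop-in text chat screen with markdown rendering turned off.
struct PlainTextChatScreen: View {
    var body: some View {
        TextChatView(renderAsMarkdown: false, apiKey: "<YOUR_OPENAI_API_KEY>")
    }
}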
Sources/ChatGPTUI/Views/VoiceChatView.swift (6 changes: 3 additions & 3 deletions)
@@ -8,8 +8,8 @@ public struct VoiceChatView<CustomContent: View>: View {
@State var isSymbolAnimating = false
var loadingImageSystemName = "circle.dotted.circle"

public init(voiceType: VoiceType = .alloy, model: ChatGPTModel = .gpt_hyphen_4o, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, apiKey: String) where CustomContent == Text {
self.vm = .init(voiceType: voiceType, model: model, systemText: systemText, temperature: temperature, apiKey: apiKey)
public init(voiceType: VoiceType = .alloy, model: ChatGPTModel = .gpt_hyphen_4o, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) where CustomContent == Text {
self.vm = .init(voiceType: voiceType, model: model, systemText: systemText, temperature: temperature, renderAsMarkdown: renderAsMarkdown, apiKey: apiKey)
}

public init(customContentVM: VoiceChatViewModel<CustomContent>) {
@@ -36,7 +36,7 @@ public struct VoiceChatView<CustomContent: View>: View {
}
}
}
.contentMargins(.top, 16, for: .scrollContent)
.contentMargins(.vertical, 16, for: .scrollContent)
.frame(maxWidth: .infinity)
.padding(.horizontal)
.overlay { overlayView }
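The equivalent sketch for the voice UI, again with a placeholder API key and illustrative type name:

import SwiftUI
import ChatGPTUI

// Sketch: voice chat screen that speaks and displays raw, unparsed responses.
struct PlainVoiceChatScreen: View {
    var body: some View {
        VoiceChatView(
            voiceType: .alloy,
            model: .gpt_hyphen_4o,
            renderAsMarkdown: false,
            apiKey: "<YOUR_OPENAI_API_KEY>"
        )
    }
}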
