[Vertex AI] Make GenerativeModel and Chat into Swift actors #13545

Merged · 5 commits · Sep 3, 2024

Commit: Refactor starting new chat
andrewheard committed Sep 3, 2024
commit 1acfa779385bf37e9a68b9a5ae19c4e5a8b2abc7
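
Since this PR turns `GenerativeModel` and `Chat` into Swift actors, creating a chat session becomes an awaited call, which is what the sample-app changes below accommodate. A minimal sketch of the resulting call pattern, assuming the same FirebaseVertexAI API the diff itself uses (`generativeModel(modelName:)`, `startChat()`, `sendMessage(_:)`):

```swift
import FirebaseVertexAI

// Sketch only: mirrors the API calls visible in this diff, not the sample app itself.
// Assumes FirebaseApp.configure() has been called at app startup.
func sendOneMessage() async throws {
  let model = VertexAI.vertexAI().generativeModel(modelName: "gemini-1.5-flash")

  // With GenerativeModel as an actor, starting a chat session hops onto the actor,
  // so the call is awaited.
  let chat = await model.startChat()

  let response = try await chat.sendMessage("Hello!")
  print(response.text ?? "(no text in response)")
}
```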
ConversationScreen:

@@ -104,7 +104,9 @@ struct ConversationScreen: View {
   }
 
   private func newChat() {
-    viewModel.startNewChat()
+    Task {
+      await viewModel.startNewChat()
+    }
   }
 }
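
Because `startNewChat()` is now `async`, the synchronous SwiftUI button action wraps the call in a `Task` (the same change is applied to `FunctionCallingScreen` further down). A minimal sketch of that bridging pattern, using a hypothetical `DemoViewModel` rather than the sample's types:

```swift
import SwiftUI

@MainActor
final class DemoViewModel: ObservableObject {
  @Published var busy = true

  func startNewChat() async {
    busy = true
    defer { busy = false }
    // ... asynchronously create a new chat session here ...
  }
}

struct NewChatButton: View {
  @ObservedObject var viewModel: DemoViewModel

  var body: some View {
    Button("New Chat") {
      // Button actions are synchronous, so spawn a Task to await the async method.
      Task {
        await viewModel.startNewChat()
      }
    }
    .disabled(viewModel.busy)
  }
}
```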
ConversationViewModel:

@@ -21,8 +21,8 @@ class ConversationViewModel: ObservableObject {
   /// This array holds both the user's and the system's chat messages
   @Published var messages = [ChatMessage]()
 
-  /// Indicates we're waiting for the model to finish
-  @Published var busy = false
+  /// Indicates we're waiting for the model to finish or the UI is loading
+  @Published var busy = true
 
   @Published var error: Error?
   var hasError: Bool {
@@ -37,25 +37,28 @@ class ConversationViewModel: ObservableObject {
 
   init() {
     model = VertexAI.vertexAI().generativeModel(modelName: "gemini-1.5-flash")
+    Task {
+      await startNewChat()
+    }
   }
 
   func sendMessage(_ text: String, streaming: Bool = true) async {
     error = nil
-    if chat == nil {
-      chat = await model.startChat()
-    }
+    stop()
     if streaming {
       await internalSendMessageStreaming(text)
     } else {
       await internalSendMessage(text)
     }
   }
 
-  func startNewChat() {
+  func startNewChat() async {
+    busy = true
+    defer {
+      busy = false
+    }
     stop()
     error = nil
-    chat = nil
     messages.removeAll()
+    chat = await model.startChat()
   }
 
   func stop() {
@@ -64,8 +67,6 @@ class ConversationViewModel: ObservableObject {
   }
 
   private func internalSendMessageStreaming(_ text: String) async {
-    chatTask?.cancel()
-
     chatTask = Task {
       busy = true
       defer {
@@ -100,8 +101,6 @@ class ConversationViewModel: ObservableObject {
   }
 
   private func internalSendMessage(_ text: String) async {
-    chatTask?.cancel()
-
     chatTask = Task {
       busy = true
       defer {
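
The view model now starts with `busy = true`, kicks off its first chat session from `init`, and lets `startNewChat()` own the busy flag via `defer`, while `sendMessage` calls `stop()` instead of cancelling `chatTask` inline in each send path. A condensed sketch of that shape, with the FirebaseVertexAI call stubbed out (in the sample it is `chat = await model.startChat()`):

```swift
import Combine

@MainActor
final class ChatSessionViewModelSketch: ObservableObject {
  // Starts true because the first chat session is created asynchronously.
  @Published var busy = true
  @Published var error: Error?

  private var chatTask: Task<Void, Never>?

  init() {
    Task {
      await startNewChat()
    }
  }

  func startNewChat() async {
    busy = true
    defer { busy = false }  // cleared once session setup finishes
    stop()
    error = nil
    await createChatSession()
  }

  func stop() {
    // Cancels any in-flight send; sendMessage() calls this before starting new work.
    chatTask?.cancel()
  }

  private func createChatSession() async {
    // Stand-in for awaiting the actor-isolated model.startChat() call.
  }
}
```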
FunctionCallingScreen:

@@ -106,7 +106,9 @@ struct FunctionCallingScreen: View {
   }
 
   private func newChat() {
-    viewModel.startNewChat()
+    Task {
+      await viewModel.startNewChat()
+    }
   }
 }
FunctionCallingViewModel:

@@ -62,22 +62,19 @@ class FunctionCallingViewModel: ObservableObject {
         ),
       ])]
     )
+    Task {
+      await startNewChat()
+    }
   }
 
   func sendMessage(_ text: String, streaming: Bool = true) async {
     error = nil
-    chatTask?.cancel()
-
+    stop()
     chatTask = Task {
       busy = true
       defer {
         busy = false
       }
 
-      if chat == nil {
-        chat = await model.startChat()
-      }
-
       // first, add the user's message to the chat
       let userMessage = ChatMessage(message: text, participant: .user)
       messages.append(userMessage)
@@ -103,11 +100,14 @@ class FunctionCallingViewModel: ObservableObject {
     }
   }
 
-  func startNewChat() {
+  func startNewChat() async {
+    busy = true
+    defer {
+      busy = false
+    }
     stop()
     error = nil
-    chat = nil
     messages.removeAll()
+    chat = await model.startChat()
   }
 
   func stop() {
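
In `FunctionCallingViewModel`, the chat session is now created up front in `startNewChat()` instead of lazily inside `sendMessage`, and `sendMessage` calls `stop()` before spawning its task rather than cancelling `chatTask` inline. A small sketch of that send/stop shape, with the Vertex AI calls replaced by a stand-in:

```swift
import Combine

@MainActor
final class SendPipelineSketch: ObservableObject {
  @Published var busy = true
  @Published var error: Error?

  private var chatTask: Task<Void, Never>?

  func sendMessage(_ text: String) async {
    error = nil
    // One cancellation point instead of chatTask?.cancel() inside each send path.
    stop()
    chatTask = Task {
      busy = true
      defer { busy = false }
      await performSend(text)  // stand-in for chat.sendMessage(...) and function-call handling
    }
    // Wait for this send to finish before returning.
    _ = await chatTask?.value
  }

  func stop() {
    chatTask?.cancel()
  }

  private func performSend(_ text: String) async {
    // Placeholder for the streaming / function-calling request.
  }
}
```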