
Commit 2583fef

Authored by paulb777 with google-labs-jules[bot], ncooke3, and andrewheard
[Vertex AI] Refactor generateContentStream to fix Swift 6 warning (#14504)
Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com>
Co-authored-by: Nick Cooke <nickcooke@google.com>
Co-authored-by: Andrew Heard <andrewheard@google.com>
1 parent 2ea79ab commit 2583fef

1 file changed: +23 -25 lines changed

FirebaseVertexAI/Sources/GenerativeModel.swift

+23 -25

@@ -185,33 +185,31 @@ public final class GenerativeModel: Sendable {
       isStreaming: true,
       options: requestOptions)

-    var responseIterator = generativeAIService.loadRequestStream(request: generateContentRequest)
-      .makeAsyncIterator()
-    return AsyncThrowingStream {
-      let response: GenerateContentResponse?
-      do {
-        response = try await responseIterator.next()
-      } catch {
-        throw GenerativeModel.generateContentError(from: error)
-      }
-
-      // The responseIterator will return `nil` when it's done.
-      guard let response = response else {
-        // This is the end of the stream! Signal it by sending `nil`.
-        return nil
-      }
+    return AsyncThrowingStream { continuation in
+      let responseStream = generativeAIService.loadRequestStream(request: generateContentRequest)
+      Task {
+        do {
+          for try await response in responseStream {
+            // Check the prompt feedback to see if the prompt was blocked.
+            if response.promptFeedback?.blockReason != nil {
+              throw GenerateContentError.promptBlocked(response: response)
+            }

-      // Check the prompt feedback to see if the prompt was blocked.
-      if response.promptFeedback?.blockReason != nil {
-        throw GenerateContentError.promptBlocked(response: response)
-      }
+            // If the stream ended early unexpectedly, throw an error.
+            if let finishReason = response.candidates.first?.finishReason, finishReason != .stop {
+              throw GenerateContentError.responseStoppedEarly(
+                reason: finishReason,
+                response: response
+              )
+            }

-      // If the stream ended early unexpectedly, throw an error.
-      if let finishReason = response.candidates.first?.finishReason, finishReason != .stop {
-        throw GenerateContentError.responseStoppedEarly(reason: finishReason, response: response)
-      } else {
-        // Response was valid content, pass it along and continue.
-        return response
+            continuation.yield(response)
+          }
+          continuation.finish()
+        } catch {
+          continuation.finish(throwing: GenerativeModel.generateContentError(from: error))
+          return
+        }
       }
     }
   }
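
The old code captured a mutable responseIterator inside the stream's closure; the refactor switches to the continuation-based AsyncThrowingStream initializer and drives the upstream sequence from a Task, so the closure passed to the stream captures no mutable state. A minimal sketch of the same pattern is below; Chunk, forwardStream, and makeSource are hypothetical names used only for illustration and are not part of the FirebaseVertexAI API.

// Sketch only: illustrates wrapping an upstream async sequence in an
// AsyncThrowingStream via its continuation, as in this commit.
struct Chunk: Sendable {
  let text: String
}

func forwardStream(
  _ makeSource: @escaping @Sendable () -> AsyncThrowingStream<Chunk, Error>
) -> AsyncThrowingStream<Chunk, Error> {
  AsyncThrowingStream { continuation in
    // The build closure only receives the continuation; a Task drives the
    // upstream sequence, so no mutable iterator is captured here.
    Task {
      do {
        for try await chunk in makeSource() {
          // Per-element validation or transformation would go here
          // before forwarding the element downstream.
          continuation.yield(chunk)
        }
        continuation.finish()
      } catch {
        continuation.finish(throwing: error)
      }
    }
  }
}

Per-element checks (such as the blocked-prompt and finish-reason checks in the commit) slot naturally inside the for try await loop before the yield, and any error thrown there ends the stream through continuation.finish(throwing:).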
