Skip to content

Commit aa77487

Browse files
authored Jun 16, 2024
Start reading chat response stream immediately after headers are received (#13)
1 parent bbf161e commit aa77487

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed
 

‎dotnet/Codeblaze.SemanticKernel.Connectors.Ollama/ChatCompletion/OllamaChatCompletionService.cs

+4-2
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,9 @@ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessa
5252
options = chatExecutionSettings
5353
};
5454

55-
var response = await Http.PostAsJsonAsync($"{Attributes["base_url"]}/api/chat", data, cancellationToken).ConfigureAwait(false);
55+
var request = new HttpRequestMessage(HttpMethod.Post, $"{Attributes["base_url"]}/api/chat");
56+
request.Content = JsonContent.Create(data);
57+
using var response = await Http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
5658

5759
ValidateOllamaResponse(response);
5860

@@ -64,7 +66,7 @@ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessa
6466

6567
while (!done)
6668
{
67-
string jsonResponse = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
69+
string jsonResponse = await reader.ReadLineAsync();
6870

6971
var chatResponseMessage = JsonSerializer.Deserialize<OllamaChatResponseMessage>(jsonResponse);
7072
done = chatResponseMessage!.Done;

0 commit comments

Comments (0)