From d0e67cce7550389b657d37bc5956ce4a9e925321 Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto
Date: Sun, 16 Jul 2023 00:09:28 +0200
Subject: [PATCH] fix: make last stream message to send empty content

Signed-off-by: Ettore Di Giacinto
---
 api/openai/chat.go | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/api/openai/chat.go b/api/openai/chat.go
index 345e5c9..5f2183e 100644
--- a/api/openai/chat.go
+++ b/api/openai/chat.go
@@ -18,8 +18,9 @@ import (
 )
 
 func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
+	emptyMessage := ""
+
 	process := func(s string, req *OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
-		emptyMessage := ""
 		initialMessage := OpenAIResponse{
 			Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: []Choice{{Delta: &Message{Role: "assistant", Content: &emptyMessage}}},
@@ -222,7 +223,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 			{
 				FinishReason: "stop",
 				Index:        0,
-				Delta:        &Message{},
+				Delta:        &Message{Content: &emptyMessage},
 			}},
 			Object: "chat.completion.chunk",
 		}
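
For context on why the final streamed chunk needs an explicit empty Content: Go's encoding/json drops a nil *string field that is tagged omitempty, so the pre-patch Delta of &Message{} would serialize without any "content" key, while a pointer to an empty string yields an explicit "content": "". The sketch below illustrates that difference only; the Message struct here is a stand-in, and its field names and JSON tags are assumptions rather than the project's actual definition in api/openai.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for the real Message struct in api/openai; the actual field
// names and JSON tags in the project may differ.
type Message struct {
	Role    string  `json:"role,omitempty"`
	Content *string `json:"content,omitempty"`
}

func main() {
	emptyMessage := ""

	// Pre-patch final chunk: Content is a nil pointer, so with omitempty
	// the "content" key is omitted from the streamed delta entirely.
	before, _ := json.Marshal(&Message{})
	fmt.Println(string(before)) // {}

	// Post-patch final chunk: Content points at an empty string, so the
	// last stream message carries an explicit "content": "".
	after, _ := json.Marshal(&Message{Content: &emptyMessage})
	fmt.Println(string(after)) // {"content":""}
}

Hoisting emptyMessage out of the process closure also lets the closing "stop" chunk reuse the same pointer instead of re-declaring it.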