From 02136531a34071f0ece05de7d0f9b530794b0135 Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto
Date: Mon, 26 Jun 2023 18:49:36 +0200
Subject: [PATCH] fix: return index and delta in stream token (#680)

Signed-off-by: mudler
---
 api/openai.go | 33 +++++++++++++++++++++++++--------
 1 file changed, 25 insertions(+), 8 deletions(-)

diff --git a/api/openai.go b/api/openai.go
index b742b40..71cb030 100644
--- a/api/openai.go
+++ b/api/openai.go
@@ -152,9 +152,14 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 	process := func(s string, req *OpenAIRequest, config *Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
 		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
 			resp := OpenAIResponse{
-				Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{Text: s}},
-				Object:  "text_completion",
+				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
+				Choices: []Choice{
+					{
+						Index: 0,
+						Text:  s,
+					},
+				},
+				Object: "text_completion",
 			}
 			log.Debug().Msgf("Sending goroutine: %s", s)
 
@@ -228,8 +233,14 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 				}
 
 				resp := &OpenAIResponse{
-					Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
-					Choices: []Choice{{FinishReason: "stop"}},
+					Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
+					Choices: []Choice{
+						{
+							Index:        0,
+							FinishReason: "stop",
+						},
+					},
+					Object: "text_completion",
 				}
 				respData, _ := json.Marshal(resp)
 
@@ -346,7 +357,7 @@ func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
 			resp := OpenAIResponse{
 				Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{Delta: &Message{Content: s}}},
+				Choices: []Choice{{Delta: &Message{Content: s}, Index: 0}},
 				Object:  "chat.completion.chunk",
 			}
 			log.Debug().Msgf("Sending goroutine: %s", s)
@@ -429,8 +440,14 @@ func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 				}
 
 				resp := &OpenAIResponse{
-					Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
-					Choices: []Choice{{FinishReason: "stop"}},
+					Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
+					Choices: []Choice{
+						{
+							FinishReason: "stop",
+							Index:        0,
+							Delta:        &Message{},
+						}},
+					Object: "chat.completion.chunk",
 				}
 				respData, _ := json.Marshal(resp)
 
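
For reference, below is a minimal, self-contained sketch of what the streamed chunks look like after this change. The Message, Choice and OpenAIResponse structs here are simplified stand-ins for the types in api/openai.go; their json tags and the example model name are assumptions chosen to mirror the OpenAI wire format, not the repository's exact definitions. The point it illustrates: every streamed choice now carries an explicit index, and chat chunks carry a delta (an empty one on the final "stop" chunk), which is what clients following the OpenAI streaming spec expect.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the types in api/openai.go.
// Field names and json tags are assumptions mirroring the OpenAI wire format.
type Message struct {
	Role    string `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}

type Choice struct {
	Index        int      `json:"index"`
	FinishReason string   `json:"finish_reason,omitempty"`
	Delta        *Message `json:"delta,omitempty"`
	Text         string   `json:"text,omitempty"`
}

type OpenAIResponse struct {
	Object  string   `json:"object"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
}

func main() {
	// A streamed chat chunk as built after this patch: the choice carries
	// an explicit index and a delta with the new token.
	chunk := OpenAIResponse{
		Object: "chat.completion.chunk",
		Model:  "gpt-3.5-turbo", // arbitrary example model name
		Choices: []Choice{{
			Index: 0,
			Delta: &Message{Content: "Hello"},
		}},
	}

	// The closing chunk written before "data: [DONE]": finish_reason plus
	// an empty delta, again with the index set.
	last := OpenAIResponse{
		Object: "chat.completion.chunk",
		Model:  "gpt-3.5-turbo",
		Choices: []Choice{{
			Index:        0,
			FinishReason: "stop",
			Delta:        &Message{},
		}},
	}

	for _, r := range []OpenAIResponse{chunk, last} {
		b, _ := json.Marshal(r)
		// SSE framing, in the same spirit as the endpoint's stream writer.
		fmt.Printf("data: %s\n\n", b)
	}
}

Running this prints two "data:" lines in which choices[0] always has "index": 0, the first carries "delta": {"content": "Hello"}, and the final one carries "finish_reason": "stop" with an empty "delta": {}.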