fix: return index and delta in stream token (#680)

Signed-off-by: mudler <mudler@localai.io>
Ettore Di Giacinto authored 1 year ago, committed by GitHub
parent d3a486a4f8
commit 02136531a3
1 changed file: api/openai.go (33 changes)

@@ -152,9 +152,14 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 	process := func(s string, req *OpenAIRequest, config *Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
 		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
 			resp := OpenAIResponse{
 				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{Text: s}},
-				Object:  "text_completion",
+				Choices: []Choice{
+					{
+						Index: 0,
+						Text:  s,
+					},
+				},
+				Object: "text_completion",
 			}
 			log.Debug().Msgf("Sending goroutine: %s", s)
@@ -228,8 +233,14 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 			}
 			resp := &OpenAIResponse{
 				Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{FinishReason: "stop"}},
+				Choices: []Choice{
+					{
+						Index:        0,
+						FinishReason: "stop",
+					},
+				},
+				Object: "text_completion",
 			}
 			respData, _ := json.Marshal(resp)
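
For context, here is a minimal, self-contained sketch of what a streamed completion token serializes to after this change. The Message/Choice/OpenAIResponse definitions below are simplified stand-ins with assumed JSON tags that mirror the OpenAI wire format, not the actual types in api/openai.go; the point is only that each streamed choice now carries an explicit index.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the repo's Message/Choice/OpenAIResponse types.
// The field set and JSON tags are assumptions chosen to mirror the OpenAI
// wire format, not the definitions in api/openai.go.
type Message struct {
	Role    string `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}

type Choice struct {
	Index        int      `json:"index"`
	FinishReason string   `json:"finish_reason,omitempty"`
	Text         string   `json:"text,omitempty"`
	Delta        *Message `json:"delta,omitempty"`
}

type OpenAIResponse struct {
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
	Object  string   `json:"object"`
}

func main() {
	// A single streamed completion token: the choice now carries an explicit index.
	chunk := OpenAIResponse{
		Model:   "example-model",
		Choices: []Choice{{Index: 0, Text: "Hello"}},
		Object:  "text_completion",
	}
	b, _ := json.Marshal(chunk)
	fmt.Println(string(b))
	// {"model":"example-model","choices":[{"index":0,"text":"Hello"}],"object":"text_completion"}
}
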
@@ -346,7 +357,7 @@ func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
 			resp := OpenAIResponse{
 				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{Delta: &Message{Content: s}}},
+				Choices: []Choice{{Delta: &Message{Content: s}, Index: 0}},
 				Object: "chat.completion.chunk",
 			}
 			log.Debug().Msgf("Sending goroutine: %s", s)
@@ -429,8 +440,14 @@ func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 			}
 			resp := &OpenAIResponse{
 				Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{FinishReason: "stop"}},
+				Choices: []Choice{
+					{
+						FinishReason: "stop",
+						Index:        0,
+						Delta:        &Message{},
+					}},
+				Object: "chat.completion.chunk",
 			}
 			respData, _ := json.Marshal(resp)
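
Likewise, a short sketch of the terminating chat stream event, reusing the stand-in types and imports from the sketch above (chatStopChunk and its model argument are illustrative helpers, not code from this patch): with this change the final chunk carries an explicit index and an empty delta alongside finish_reason, instead of a bare finish_reason-only choice.

// chatStopChunk builds the terminating stream event, continuing the
// stand-in types from the previous sketch.
func chatStopChunk(model string) ([]byte, error) {
	resp := &OpenAIResponse{
		Model: model, // echo back the model the client sent, per the OpenAI spec
		Choices: []Choice{
			{
				FinishReason: "stop",
				Index:        0,
				Delta:        &Message{},
			},
		},
		Object: "chat.completion.chunk",
	}
	// With the assumed tags above this marshals to:
	// {"model":"...","choices":[{"index":0,"finish_reason":"stop","delta":{}}],"object":"chat.completion.chunk"}
	return json.Marshal(resp)
}
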
