fix: make completions endpoint closer to the OpenAI specification (#790)

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Branch: master
Ettore Di Giacinto authored 1 year ago, committed by GitHub
parent 26e1496075
commit 94817b557c
3 changed files:
  api/openai/api.go (2 lines changed)
  api/openai/chat.go (2 lines changed)
  api/openai/completion.go (4 lines changed)

api/openai/api.go
@@ -46,7 +46,7 @@ type OpenAIResponse struct {
 }
 type Choice struct {
-	Index int `json:"index,omitempty"`
+	Index int `json:"index"`
 	FinishReason string `json:"finish_reason,omitempty"`
 	Message *Message `json:"message,omitempty"`
 	Delta *Message `json:"delta,omitempty"`

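For context, a minimal sketch (not part of the commit, using hypothetical trimmed-down structs) of why dropping `omitempty` matters here: with `omitempty`, a choice at index 0 serializes without an `index` field at all, while OpenAI responses always include it.

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical minimal stand-ins for the real Choice type, for illustration only.
type ChoiceOld struct {
	Index int `json:"index,omitempty"`
}

type ChoiceNew struct {
	Index int `json:"index"`
}

func main() {
	before, _ := json.Marshal(ChoiceOld{Index: 0})
	after, _ := json.Marshal(ChoiceNew{Index: 0})
	fmt.Println(string(before)) // {} — the zero-valued index is dropped
	fmt.Println(string(after))  // {"index":0} — matches the OpenAI response shape
}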
api/openai/chat.go
@@ -302,7 +302,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 			return
 		}
-		*c = append(*c, Choice{Message: &Message{Role: "assistant", Content: &s}})
+		*c = append(*c, Choice{FinishReason: "stop", Index: 0, Message: &Message{Role: "assistant", Content: &s}})
 	}, nil)
 	if err != nil {
 		return err

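As a rough sketch (assuming minimal stand-ins for the real Choice and Message types, not the project's actual code), the chat callback now appends a choice that serializes with the finish_reason and index fields OpenAI clients expect:

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical trimmed-down types, for illustration only.
type Message struct {
	Role    string  `json:"role,omitempty"`
	Content *string `json:"content,omitempty"`
}

type Choice struct {
	Index        int      `json:"index"`
	FinishReason string   `json:"finish_reason,omitempty"`
	Message      *Message `json:"message,omitempty"`
}

func main() {
	s := "Hello!"
	var c []Choice
	// Mirrors the callback body added in chat.go.
	c = append(c, Choice{FinishReason: "stop", Index: 0, Message: &Message{Role: "assistant", Content: &s}})
	b, _ := json.Marshal(c)
	fmt.Println(string(b))
	// [{"index":0,"finish_reason":"stop","message":{"role":"assistant","content":"Hello!"}}]
}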
api/openai/completion.go
@@ -122,7 +122,7 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
 	}
 	var result []Choice
-	for _, i := range config.PromptStrings {
+	for k, i := range config.PromptStrings {
 		// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 		templatedInput, err := o.Loader.TemplatePrefix(templateFile, struct {
 			Input string
@@ -135,7 +135,7 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
 	}
 	r, err := ComputeChoices(i, input.N, config, o, o.Loader, func(s string, c *[]Choice) {
-		*c = append(*c, Choice{Text: s})
+		*c = append(*c, Choice{Text: s, FinishReason: "stop", Index: k})
 	}, nil)
 	if err != nil {
 		return err

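A rough sketch of the effect on multi-prompt completion requests (hypothetical helper and type names, not the actual LocalAI code): iterating with the prompt index k means each prompt string contributes a choice tagged with its position, so clients can correlate choices with prompts as they would against the OpenAI API.

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical trimmed-down Choice, for illustration only.
type Choice struct {
	Index        int    `json:"index"`
	FinishReason string `json:"finish_reason,omitempty"`
	Text         string `json:"text,omitempty"`
}

// fakeComplete stands in for the real inference call; it just echoes the prompt.
func fakeComplete(prompt string) string { return "completion for: " + prompt }

func main() {
	prompts := []string{"first prompt", "second prompt"}
	var result []Choice
	// Mirrors the loop change: the prompt index k is carried into each choice.
	for k, p := range prompts {
		result = append(result, Choice{Text: fakeComplete(p), FinishReason: "stop", Index: k})
	}
	b, _ := json.Marshal(result)
	fmt.Println(string(b))
	// [{"index":0,"finish_reason":"stop","text":"completion for: first prompt"},
	//  {"index":1,"finish_reason":"stop","text":"completion for: second prompt"}]
}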