Allow injecting a prompt as part of the call

Branch: add/first-example
Author: mudler, 2 years ago
Parent: 4a932483e1
Commit: c17dcc5e9d

1 changed file: api.go (17 changed lines)

@@ -64,6 +64,7 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
 	input := new(struct {
 		Messages []Message `json:"messages"`
 		Model    string    `json:"model"`
+		Prompt   string    `json:"prompt"`
 	})
 	if err := c.BodyParser(input); err != nil {
 		return err
@@ -126,12 +127,16 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
 	predInput := strings.Join(mess, "\n")
 
-	// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
-	templatedInput, err := loader.TemplatePrefix(input.Model, struct {
-		Input string
-	}{Input: predInput})
-	if err == nil {
-		predInput = templatedInput
+	if input.Prompt == "" {
+		// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
+		templatedInput, err := loader.TemplatePrefix(input.Model, struct {
+			Input string
+		}{Input: predInput})
+		if err == nil {
+			predInput = templatedInput
+		}
+	} else {
+		predInput = input.Prompt + predInput
 	}
 
 	// Generate the prediction using the language model
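
For reference, a minimal client sketch that exercises the new "prompt" field. The endpoint path, port, model filename, and the Message field names are assumptions for illustration; only the "messages", "model", and "prompt" JSON keys come from the handler above. When "prompt" is non-empty, the server skips the "file.bin.tmpl" template lookup and prepends the string to the joined messages.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// Message mirrors the handler's Message type; the field names here are assumed.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// predictionRequest matches the anonymous struct parsed by c.BodyParser in api.go.
type predictionRequest struct {
	Messages []Message `json:"messages"`
	Model    string    `json:"model"`
	Prompt   string    `json:"prompt"` // new: prepended verbatim to the joined messages
}

func main() {
	req := predictionRequest{
		Messages: []Message{{Role: "user", Content: "What is the capital of France?"}},
		Model:    "ggml-model.bin", // hypothetical model file name
		// Non-empty Prompt bypasses the template prefix and is injected directly.
		Prompt: "You are a concise assistant.\n",
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}

	// Hypothetical endpoint path and port; adjust to the route registered in api.go.
	resp, err := http.Post("http://localhost:8080/v1/chat/completions", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}

Sending the same request with an empty "prompt" falls back to the previous behaviour, where the per-model "file.bin.tmpl" template (if present) supplies the prefix.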
