Allow injecting a prompt as part of the call

add/first-example
mudler 2 years ago
parent 4a932483e1
commit c17dcc5e9d
  1 changed file with 5 additions and 0 deletions
      api.go

@ -64,6 +64,7 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
input := new(struct {
	Messages []Message `json:"messages"`
	Model    string    `json:"model"`
	Prompt   string    `json:"prompt"`
})
if err := c.BodyParser(input); err != nil {
	return err
@ -126,6 +127,7 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
predInput := strings.Join(mess, "\n")
if input.Prompt == "" {
	// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
	templatedInput, err := loader.TemplatePrefix(input.Model, struct {
		Input string
@ -133,6 +135,9 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
	if err == nil {
		predInput = templatedInput
	}
} else {
	predInput = input.Prompt + predInput
}
// Generate the prediction using the language model
prediction, err := model.Predict(

Loading…
Cancel
Save