From c17dcc5e9da79117f044d75dcd1a71a766d26969 Mon Sep 17 00:00:00 2001
From: mudler
Date: Sun, 9 Apr 2023 09:36:19 +0200
Subject: [PATCH] Allow injecting a prompt as part of the call

---
 api.go | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/api.go b/api.go
index 79e4e0f..98a6d3f 100644
--- a/api.go
+++ b/api.go
@@ -64,6 +64,7 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
 	input := new(struct {
 		Messages []Message `json:"messages"`
 		Model    string    `json:"model"`
+		Prompt   string    `json:"prompt"`
 	})
 	if err := c.BodyParser(input); err != nil {
 		return err
@@ -126,12 +127,16 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
 
 	predInput := strings.Join(mess, "\n")
 
-	// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
-	templatedInput, err := loader.TemplatePrefix(input.Model, struct {
-		Input string
-	}{Input: predInput})
-	if err == nil {
-		predInput = templatedInput
+	if input.Prompt == "" {
+		// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
+		templatedInput, err := loader.TemplatePrefix(input.Model, struct {
+			Input string
+		}{Input: predInput})
+		if err == nil {
+			predInput = templatedInput
+		}
+	} else {
+		predInput = input.Prompt + predInput
 	}
 
 	// Generate the prediction using the language model