fix: downgrade gpt4all (#503)

Signed-off-by: mudler <mudler@mocaccino.org>
Ettore Di Giacinto, 2 years ago (committed via GitHub)
parent e873fc7b71
commit b503725dc7
1. Makefile (8 lines changed)
2. api/api_test.go (32 lines changed)

Makefile
@@ -4,8 +4,8 @@ GOVET=$(GOCMD) vet
 BINARY_NAME=local-ai
 GOLLAMA_VERSION?=3f10005b70c657c317d2cae4c22a9bd295f54a3c
-GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
-GPT4ALL_VERSION?=bc624f5389d656b1995b6db592f76f5853712cf6
+GPT4ALL_REPO?=https://github.com/go-skynet/gpt4all
+GPT4ALL_VERSION?=f7498c9
 GOGGMLTRANSFORMERS_VERSION?=6fb862c72bc04568120e711b176defe116d3751e
 RWKV_REPO?=https://github.com/donomii/go-rwkv.cpp
 RWKV_VERSION?=049c1b54798a0fb8429a0905060fa5e2d64255ca
@@ -232,8 +232,10 @@ test-models/testmodel:
 	cp tests/models_fixtures/* test-models

 test: prepare test-models/testmodel
+	cp -r backend-assets api
 	cp tests/models_fixtures/* test-models
-	C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --flake-attempts 5 -v -r ./api ./pkg
+	C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="!gpt4all" --flake-attempts 5 -v -r ./api ./pkg
+	C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="gpt4all" --flake-attempts 5 -v -r ./api ./pkg

 ## Help:
 help: ## Show this help.
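With the gpt4all specs now tagged, the test target runs Ginkgo twice: once with --label-filter="!gpt4all" for everything else, then once with --label-filter="gpt4all" so the heavier gpt4all run is isolated and can retry on its own. Below is a minimal, standalone sketch of how Ginkgo v2 label filtering works; the package and spec names are made up and nothing here comes from the LocalAI tree.

// labels_sketch_test.go - minimal Ginkgo v2 suite showing Label(...) plus --label-filter.
package labels_sketch_test

import (
	"testing"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestLabelsSketch(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Labels sketch suite")
}

var _ = Describe("splitting a suite by label", func() {
	// Selected by: ginkgo --label-filter="gpt4all"
	It("runs the slow, gpt4all-backed spec", Label("gpt4all"), func() {
		Expect(true).To(BeTrue())
	})

	// Selected by: ginkgo --label-filter="!gpt4all"
	It("runs everything else", func() {
		Expect(1 + 1).To(Equal(2))
	})
})

Running ginkgo with --label-filter="gpt4all" against this file selects only the first spec; negating the filter selects only the second, which is exactly how the two Makefile invocations split the suite.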

api/api_test.go
@@ -3,6 +3,7 @@ package api_test
 import (
 	"bytes"
 	"context"
+	"embed"
 	"encoding/json"
 	"fmt"
 	"io/ioutil"
@@ -95,6 +96,9 @@ func postModelApplyRequest(url string, request modelApplyRequest) (response map[
 	return
 }

+//go:embed backend-assets/*
+var backendAssets embed.FS
+
 var _ = Describe("API test", func() {

 	var app *fiber.App
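The //go:embed directive compiles the backend-assets tree (copied into api/ by the Makefile change above) into the test binary, so the suite can hand those assets to the server without depending on the working directory. A small self-contained sketch of the mechanism follows; it assumes an assets/ directory exists next to the source file at build time, whereas the real test embeds backend-assets/*.

// embed_sketch.go - standalone illustration of embedding a directory with go:embed.
package main

import (
	"embed"
	"fmt"
	"io/fs"
	"log"
)

//go:embed assets/*
var assets embed.FS

func main() {
	// Walk the embedded tree; paths mirror the on-disk layout at build time.
	err := fs.WalkDir(assets, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		fmt.Println(path, d.IsDir())
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
}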
@@ -114,7 +118,7 @@ var _ = Describe("API test", func() {
 		modelLoader = model.NewModelLoader(tmpdir)

 		c, cancel = context.WithCancel(context.Background())

-		app, err = App(WithContext(c), WithModelLoader(modelLoader))
+		app, err = App(WithContext(c), WithModelLoader(modelLoader), WithBackendAssets(backendAssets), WithBackendAssetsOutput(tmpdir))
 		Expect(err).ToNot(HaveOccurred())
 		go app.Listen("127.0.0.1:9090")
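App here follows the common Go functional-options pattern: each With* call returns a closure that adjusts the configuration before the server is built. The sketch below only shows the shape of that pattern; Server, Option, and the With* constructors are illustrative stand-ins, not the actual LocalAI signatures.

// options_sketch.go - generic functional-options pattern, not LocalAI's real API.
package main

import (
	"context"
	"embed"
	"fmt"
)

type Server struct {
	ctx          context.Context
	assets       embed.FS
	assetsOutput string
}

// Option mutates a Server during construction.
type Option func(*Server)

func WithContext(ctx context.Context) Option    { return func(s *Server) { s.ctx = ctx } }
func WithBackendAssets(a embed.FS) Option       { return func(s *Server) { s.assets = a } }
func WithBackendAssetsOutput(dir string) Option { return func(s *Server) { s.assetsOutput = dir } }

// NewServer applies each option in order over a default-initialized Server.
func NewServer(opts ...Option) *Server {
	s := &Server{ctx: context.Background()}
	for _, o := range opts {
		o(s)
	}
	return s
}

func main() {
	s := NewServer(WithContext(context.TODO()), WithBackendAssetsOutput("/tmp/assets"))
	fmt.Println(s.assetsOutput, s.ctx != nil)
}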
@@ -191,6 +195,32 @@ var _ = Describe("API test", func() {
 			Expect(err).ToNot(HaveOccurred())
 			Expect(content["backend"]).To(Equal("bert-embeddings"))
 		})
+
+		It("runs gpt4all", Label("gpt4all"), func() {
+			if runtime.GOOS != "linux" {
+				Skip("test supported only on linux")
+			}
+
+			response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
+				URL:       "github:go-skynet/model-gallery/gpt4all-j.yaml",
+				Name:      "gpt4all-j",
+				Overrides: map[string]string{},
+			})
+
+			Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
+			uuid := response["uuid"].(string)
+
+			Eventually(func() bool {
+				response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
+				fmt.Println(response)
+				return response["processed"].(bool)
+			}, "360s").Should(Equal(true))
+
+			resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "gpt4all-j", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "How are you?"}}})
+			Expect(err).ToNot(HaveOccurred())
+			Expect(len(resp.Choices)).To(Equal(1))
+			Expect(resp.Choices[0].Message.Content).To(ContainSubstring("well"))
+		})
 	})
 })
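The new spec exercises the full gallery flow: apply github:go-skynet/model-gallery/gpt4all-j.yaml, poll /models/jobs/<uuid> until processed is true (up to 360s), then send a chat completion to the gpt4all-j model and assert on the reply. The waiting step is the standard Gomega Eventually idiom; here is a tiny standalone sketch of it, with an in-memory flag standing in for the real HTTP job-status endpoint.

// eventually_sketch_test.go - polling an async job with Gomega's Eventually.
package eventually_sketch_test

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/onsi/gomega"
)

func TestEventuallyPolling(t *testing.T) {
	g := gomega.NewWithT(t)

	// Stand-in for the async model-apply job: it flips to "processed" shortly after starting.
	var processed atomic.Bool
	go func() {
		time.Sleep(200 * time.Millisecond)
		processed.Store(true)
	}()

	// Poll until the job reports completion, or fail after the timeout.
	// The real spec uses the string form "360s" as its timeout.
	g.Eventually(func() bool {
		return processed.Load()
	}, "2s").Should(gomega.BeTrue())
}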
