diff --git a/Makefile b/Makefile
index 90a48e9..6f42e0e 100644
--- a/Makefile
+++ b/Makefile
@@ -4,8 +4,8 @@ GOVET=$(GOCMD) vet
 BINARY_NAME=local-ai
 
 GOLLAMA_VERSION?=3f10005b70c657c317d2cae4c22a9bd295f54a3c
-GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
-GPT4ALL_VERSION?=bc624f5389d656b1995b6db592f76f5853712cf6
+GPT4ALL_REPO?=https://github.com/go-skynet/gpt4all
+GPT4ALL_VERSION?=f7498c9
 GOGGMLTRANSFORMERS_VERSION?=6fb862c72bc04568120e711b176defe116d3751e
 RWKV_REPO?=https://github.com/donomii/go-rwkv.cpp
 RWKV_VERSION?=049c1b54798a0fb8429a0905060fa5e2d64255ca
@@ -232,8 +232,10 @@ test-models/testmodel:
 	cp tests/models_fixtures/* test-models
 
 test: prepare test-models/testmodel
+	cp -r backend-assets api
 	cp tests/models_fixtures/* test-models
-	C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --flake-attempts 5 -v -r ./api ./pkg
+	C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="!gpt4all" --flake-attempts 5 -v -r ./api ./pkg
+	C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="gpt4all" --flake-attempts 5 -v -r ./api ./pkg
 
 ## Help:
 help: ## Show this help.
diff --git a/api/api_test.go b/api/api_test.go
index e1d3f71..54118b8 100644
--- a/api/api_test.go
+++ b/api/api_test.go
@@ -3,6 +3,7 @@ package api_test
 import (
 	"bytes"
 	"context"
+	"embed"
 	"encoding/json"
 	"fmt"
 	"io/ioutil"
@@ -95,6 +96,9 @@ func postModelApplyRequest(url string, request modelApplyRequest) (response map[
 	return
 }
 
+//go:embed backend-assets/*
+var backendAssets embed.FS
+
 var _ = Describe("API test", func() {
 
 	var app *fiber.App
@@ -114,7 +118,7 @@ var _ = Describe("API test", func() {
 		modelLoader = model.NewModelLoader(tmpdir)
 		c, cancel = context.WithCancel(context.Background())
 
-		app, err = App(WithContext(c), WithModelLoader(modelLoader))
+		app, err = App(WithContext(c), WithModelLoader(modelLoader), WithBackendAssets(backendAssets), WithBackendAssetsOutput(tmpdir))
 		Expect(err).ToNot(HaveOccurred())
 		go app.Listen("127.0.0.1:9090")
 
@@ -191,6 +195,32 @@ var _ = Describe("API test", func() {
 			Expect(err).ToNot(HaveOccurred())
 			Expect(content["backend"]).To(Equal("bert-embeddings"))
 		})
+		It("runs gpt4all", Label("gpt4all"), func() {
+			if runtime.GOOS != "linux" {
+				Skip("test supported only on linux")
+			}
+
+			response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
+				URL:       "github:go-skynet/model-gallery/gpt4all-j.yaml",
+				Name:      "gpt4all-j",
+				Overrides: map[string]string{},
+			})
+
+			Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
+
+			uuid := response["uuid"].(string)
+
+			Eventually(func() bool {
+				response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
+				fmt.Println(response)
+				return response["processed"].(bool)
+			}, "360s").Should(Equal(true))
+
+			resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "gpt4all-j", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "How are you?"}}})
+			Expect(err).ToNot(HaveOccurred())
+			Expect(len(resp.Choices)).To(Equal(1))
+			Expect(resp.Choices[0].Message.Content).To(ContainSubstring("well"))
+		})
 	})
 })