From c974dad799873892678b32738d3f99903d2f0b46 Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto
Date: Wed, 3 May 2023 17:29:18 +0200
Subject: [PATCH] Return usage in the API responses (#166)

---
 api/openai.go                                 | 17 +++++++---
 examples/README.md                            |  1 +
 examples/langchain-python/README.md           | 33 +++++++++++++++++++
 examples/langchain-python/docker-compose.yaml | 16 +++++++++
 examples/langchain-python/models              |  1 +
 examples/langchain-python/test.py             |  6 ++++
 6 files changed, 69 insertions(+), 5 deletions(-)
 create mode 100644 examples/langchain-python/README.md
 create mode 100644 examples/langchain-python/docker-compose.yaml
 create mode 120000 examples/langchain-python/models
 create mode 100644 examples/langchain-python/test.py

diff --git a/api/openai.go b/api/openai.go
index c1d4001..08e6373 100644
--- a/api/openai.go
+++ b/api/openai.go
@@ -27,12 +27,19 @@ type ErrorResponse struct {
 	Error *APIError `json:"error,omitempty"`
 }
 
+type OpenAIUsage struct {
+	PromptTokens     int `json:"prompt_tokens"`
+	CompletionTokens int `json:"completion_tokens"`
+	TotalTokens      int `json:"total_tokens"`
+}
+
 type OpenAIResponse struct {
-	Created int      `json:"created,omitempty"`
-	Object  string   `json:"object,omitempty"`
-	ID      string   `json:"id,omitempty"`
-	Model   string   `json:"model,omitempty"`
-	Choices []Choice `json:"choices,omitempty"`
+	Created int         `json:"created,omitempty"`
+	Object  string      `json:"object,omitempty"`
+	ID      string      `json:"id,omitempty"`
+	Model   string      `json:"model,omitempty"`
+	Choices []Choice    `json:"choices,omitempty"`
+	Usage   OpenAIUsage `json:"usage"`
 }
 
 type Choice struct {
diff --git a/examples/README.md b/examples/README.md
index 0af9966..d7fc24f 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -7,6 +7,7 @@ Here is a list of projects that can easily be integrated with the LocalAI backen
 - [chatbot-ui](https://github.com/go-skynet/LocalAI/tree/master/examples/chatbot-ui/) (by [@mkellerman](https://github.com/mkellerman))
 - [discord-bot](https://github.com/go-skynet/LocalAI/tree/master/examples/discord-bot/) (by [@mudler](https://github.com/mudler))
 - [langchain](https://github.com/go-skynet/LocalAI/tree/master/examples/langchain/) (by [@dave-gray101](https://github.com/dave-gray101))
+- [langchain-python](https://github.com/go-skynet/LocalAI/tree/master/examples/langchain-python/) (by [@mudler](https://github.com/mudler))
 - [slack-bot](https://github.com/go-skynet/LocalAI/tree/master/examples/slack-bot/) (by [@mudler](https://github.com/mudler))
 
 ## Want to contribute?
diff --git a/examples/langchain-python/README.md b/examples/langchain-python/README.md
new file mode 100644
index 0000000..9fcf297
--- /dev/null
+++ b/examples/langchain-python/README.md
@@ -0,0 +1,33 @@
+## Langchain-python
+
+Langchain example from [quickstart](https://python.langchain.com/en/latest/getting_started/getting_started.html).
+
+To interact with langchain, you can just set the `OPENAI_API_BASE` URL and provide a token with a random string.
+
+See the example below:
+
+```
+# Clone LocalAI
+git clone https://github.com/go-skynet/LocalAI
+
+cd LocalAI/examples/langchain-python
+
+# (optional) Checkout a specific LocalAI tag
+# git checkout -b build
+
+# Download gpt4all-j to models/
+wget https://gpt4all.io/models/ggml-gpt4all-j.bin -O models/ggml-gpt4all-j
+
+# start with docker-compose
+docker-compose up -d --build
+
+
+pip install langchain
+pip install openai
+
+export OPENAI_API_BASE=http://localhost:8080
+export OPENAI_API_KEY=sk-
+
+python test.py
+# A good company name for a company that makes colorful socks would be "Colorsocks".
+```
\ No newline at end of file
diff --git a/examples/langchain-python/docker-compose.yaml b/examples/langchain-python/docker-compose.yaml
new file mode 100644
index 0000000..ed3eaec
--- /dev/null
+++ b/examples/langchain-python/docker-compose.yaml
@@ -0,0 +1,16 @@
+version: '3.6'
+
+services:
+  api:
+    image: quay.io/go-skynet/local-ai:latest
+    build:
+      context: ../../
+      dockerfile: Dockerfile.dev
+    ports:
+      - 8080:8080
+    environment:
+      - DEBUG=true
+      - MODELS_PATH=/models
+    volumes:
+      - ./models:/models:cached
+    command: ["/usr/bin/local-ai" ]
diff --git a/examples/langchain-python/models b/examples/langchain-python/models
new file mode 120000
index 0000000..9c5c2aa
--- /dev/null
+++ b/examples/langchain-python/models
@@ -0,0 +1 @@
+../chatbot-ui/models
\ No newline at end of file
diff --git a/examples/langchain-python/test.py b/examples/langchain-python/test.py
new file mode 100644
index 0000000..a9fac35
--- /dev/null
+++ b/examples/langchain-python/test.py
@@ -0,0 +1,6 @@
+
+from langchain.llms import OpenAI
+
+llm = OpenAI(temperature=0.9,model_name="gpt-3.5-turbo")
+text = "What would be a good company name for a company that makes colorful socks?"
+print(llm(text))
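
The test.py added above relies on the `OPENAI_API_BASE` and `OPENAI_API_KEY` environment variables set in the README. A variant (not part of the patch) is to point langchain at LocalAI directly in code; the `openai_api_base` / `openai_api_key` keyword names are an assumption about the langchain version installed via `pip install langchain`, so treat this as a sketch rather than a drop-in replacement for test.py.

```
# Variant of test.py that configures the LocalAI endpoint in code instead of
# via environment variables. The openai_api_base / openai_api_key keyword
# names are assumed to be supported by the installed langchain version.
from langchain.llms import OpenAI

llm = OpenAI(
    temperature=0.9,
    model_name="gpt-3.5-turbo",
    openai_api_base="http://localhost:8080",
    openai_api_key="sk-",  # LocalAI accepts any placeholder token
)
text = "What would be a good company name for a company that makes colorful socks?"
print(llm(text))
```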
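
With the `Usage` field added to `OpenAIResponse` in api/openai.go, responses now carry a `usage` block next to `choices`. The sketch below is not part of the patch: it is a minimal illustration of reading those fields from a running LocalAI instance, assuming the `/v1/chat/completions` endpoint, the `localhost:8080` port from docker-compose, and the `gpt-3.5-turbo` model alias used by test.py.

```
# Minimal sketch of inspecting the new usage block in an API response.
# Endpoint path and model name are assumptions based on the example setup.
import requests

resp = requests.post(
    "http://localhost:8080/v1/chat/completions",
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Name a colorful sock company."}],
    },
)
body = resp.json()

# These keys mirror the OpenAIUsage struct added in api/openai.go.
usage = body.get("usage", {})
print("prompt_tokens:    ", usage.get("prompt_tokens"))
print("completion_tokens:", usage.get("completion_tokens"))
print("total_tokens:     ", usage.get("total_tokens"))
```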