docs(examples): add AutoGPT (#397) — commit 59f7953249 (parent 217dbb448e)
.env (new file, +5 lines):

OPENAI_API_KEY=sk---anystringhere
OPENAI_API_BASE=http://api:8080/v1
# Models to preload at start
# Here we configure gpt4all as gpt-3.5-turbo and bert as embeddings
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, { "url": "github:go-skynet/model-gallery/bert-embeddings.yaml", "name": "text-embedding-ada-002"}]
README.md (new file, +32 lines):

# AutoGPT

Example of integration with [AutoGPT](https://github.com/Significant-Gravitas/Auto-GPT).

## Run

```bash
# Clone LocalAI
git clone https://github.com/go-skynet/LocalAI

cd LocalAI/examples/autoGPT

docker-compose run --rm auto-gpt
```

Note: The example automatically downloads the `gpt4all` model as it is under a permissive license. The GPT4All model does not seem to be enough to run AutoGPT. WizardLM-7b-uncensored seems to perform better (with `f16: true`).

See the `.env` configuration file to set a different model with the [model-gallery](https://github.com/go-skynet/model-gallery) by editing `PRELOAD_MODELS`.

## Without docker

Run AutoGPT with `OPENAI_API_BASE` pointing to the LocalAI endpoint. For instance, if you run it locally:

```
OPENAI_API_BASE=http://localhost:8080 python ...
```

Note: you need models named `gpt-3.5-turbo` and `text-embedding-ada-002`. You can preload those in LocalAI at start by setting in the env:

```
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, { "url": "github:go-skynet/model-gallery/bert-embeddings.yaml", "name": "text-embedding-ada-002"}]
```
# docker-compose for the LocalAI + AutoGPT example.
# Services:
#   api      — LocalAI, exposed on port 8080; preloads models per .env (PRELOAD_MODELS).
#   auto-gpt — AutoGPT container pointed at the api service (OPENAI_API_BASE in .env);
#              run on demand via `docker-compose run --rm auto-gpt` (profile keeps it
#              out of a plain `docker-compose up`).
#   redis    — memory backend for AutoGPT.
version: "3.9"

services:
  api:
    image: quay.io/go-skynet/local-ai:latest
    ports:
      # Quoted to avoid the YAML 1.1 sexagesimal implicit-typing trap on
      # colon-separated digit strings.
      - "8080:8080"
    env_file:
      - .env
    environment:
      - DEBUG=true
      - MODELS_PATH=/models
    volumes:
      - ./models:/models:cached
    command: ["/usr/bin/local-ai"]

  auto-gpt:
    image: significantgravitas/auto-gpt
    depends_on:
      # NOTE(review): `service_healthy` requires a healthcheck on `api` — none is
      # defined here, so it presumably comes from the image; confirm, otherwise
      # this dependency never becomes healthy.
      api:
        condition: service_healthy
      redis:
        condition: service_started
    env_file:
      - .env
    environment:
      MEMORY_BACKEND: ${MEMORY_BACKEND:-redis}
      REDIS_HOST: ${REDIS_HOST:-redis}
    # Excluded from `docker-compose up`; started explicitly with `run`.
    profiles: ["exclude-from-up"]
    volumes:
      - ./auto_gpt_workspace:/app/autogpt/auto_gpt_workspace
      - ./data:/app/data
      ## allow auto-gpt to write logs to disk
      - ./logs:/app/logs
      ## uncomment following lines if you want to make use of these files
      ## you must have them existing in the same folder as this docker-compose.yml
      # - type: bind
      #   source: ./azure.yaml
      #   target: /app/azure.yaml
      # - type: bind
      #   source: ./ai_settings.yaml
      #   target: /app/ai_settings.yaml

  redis:
    image: "redis/redis-stack-server:latest"
Loading…
Reference in new issue