From 3b3164b0394902be87c91aea0390bc65b208e284 Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto
Date: Fri, 9 Jun 2023 00:45:44 +0200
Subject: [PATCH] examples(telegram): add (#547)

---
 examples/README.md                       | 10 ++++
 examples/telegram-bot/README.md          | 30 +++++++++++
 examples/telegram-bot/docker-compose.yml | 66 ++++++++++++++++++++++++
 3 files changed, 106 insertions(+)
 create mode 100644 examples/telegram-bot/README.md
 create mode 100644 examples/telegram-bot/docker-compose.yml

diff --git a/examples/README.md b/examples/README.md
index 2285ed1..d86cd1f 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -106,6 +106,16 @@ Shows how to integrate with `Langchain` and `Chroma` to enable question answerin
 
 [Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/langchain-chroma/)
 
+### Telegram bot
+
+_by [@mudler](https://github.com/mudler)_
+
+![Screenshot from 2023-06-09 00-36-26](https://github.com/go-skynet/LocalAI/assets/2420543/e98b4305-fa2d-41cf-9d2f-1bb2d75ca902)
+
+Use LocalAI to power a Telegram bot assistant, with image generation and audio support!
+
+[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/telegram-bot/)
+
 ### Template for Runpod.io
 
 _by [@fHachenberg](https://github.com/fHachenberg)_
diff --git a/examples/telegram-bot/README.md b/examples/telegram-bot/README.md
new file mode 100644
index 0000000..d0ab0df
--- /dev/null
+++ b/examples/telegram-bot/README.md
@@ -0,0 +1,30 @@
+## Telegram bot
+
+![Screenshot from 2023-06-09 00-36-26](https://github.com/go-skynet/LocalAI/assets/2420543/e98b4305-fa2d-41cf-9d2f-1bb2d75ca902)
+
+This example uses a fork of [chatgpt-telegram-bot](https://github.com/karfly/chatgpt_telegram_bot) to deploy a Telegram bot with LocalAI instead of OpenAI.
+
+```bash
+# Clone LocalAI
+git clone https://github.com/go-skynet/LocalAI
+
+cd LocalAI/examples/telegram-bot
+
+git clone https://github.com/mudler/chatgpt_telegram_bot
+
+cp -rf docker-compose.yml chatgpt_telegram_bot
+
+cd chatgpt_telegram_bot
+
+mv config/config.example.yml config/config.yml
+mv config/config.example.env config/config.env
+
+# Edit config/config.yml to set the Telegram bot token
+vim config/config.yml
+
+# Run the bot
+docker-compose --env-file config/config.env up --build
+```
+
+Note: on first start, LocalAI is configured to download `gpt4all-j` (served in place of `gpt-3.5-turbo`), `stablediffusion` for image generation, and `whisper-base` for audio transcription. The download size is >6GB; if your network connection is slow, adapt the healthcheck section of the `docker-compose.yml` file accordingly (for instance, replace `20m` with `1h`).
+To configure models manually, comment out the `PRELOAD_MODELS` environment variable in the `docker-compose.yml` file and see, for instance, the `model` directory of the [chatbot-ui-manual example](https://github.com/go-skynet/LocalAI/tree/master/examples/chatbot-ui-manual).
\ No newline at end of file
diff --git a/examples/telegram-bot/docker-compose.yml b/examples/telegram-bot/docker-compose.yml
new file mode 100644
index 0000000..7a79248
--- /dev/null
+++ b/examples/telegram-bot/docker-compose.yml
@@ -0,0 +1,66 @@
+version: "3"
+
+services:
+  api:
+    image: quay.io/go-skynet/local-ai:v1.18.0-ffmpeg
+    # On first start LocalAI downloads the models defined in PRELOAD_MODELS,
+    # so you might need to tweak the healthcheck values here according to your network connection.
+    # Here we allow 20m to download all the required files.
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
+      interval: 1m
+      timeout: 20m
+      retries: 20
+    ports:
+      - 8080:8080
+    environment:
+      - DEBUG=true
+      - MODELS_PATH=/models
+      - IMAGE_PATH=/tmp
+      # You can preload different models here as well.
+      # See: https://github.com/go-skynet/model-gallery
+      - 'PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, {"url": "github:go-skynet/model-gallery/stablediffusion.yaml"}, {"url": "github:go-skynet/model-gallery/whisper-base.yaml", "name": "whisper-1"}]'
+    volumes:
+      - ./models:/models:cached
+    command: ["/usr/bin/local-ai"]
+  mongo:
+    container_name: mongo
+    image: mongo:latest
+    restart: always
+    ports:
+      - 127.0.0.1:${MONGODB_PORT:-27017}:${MONGODB_PORT:-27017}
+    volumes:
+      - ${MONGODB_PATH:-./mongodb}:/data/db
+    # TODO: add auth
+
+  chatgpt_telegram_bot:
+    container_name: chatgpt_telegram_bot
+    command: python3 bot/bot.py
+    restart: always
+    environment:
+      - OPENAI_API_KEY=sk---anystringhere
+      - OPENAI_API_BASE=http://api:8080/v1
+    build:
+      context: "."
+      dockerfile: Dockerfile
+    depends_on:
+      api:
+        condition: service_healthy
+      mongo:
+        condition: service_started
+
+  mongo_express:
+    container_name: mongo-express
+    image: mongo-express:latest
+    restart: always
+    ports:
+      - 127.0.0.1:${MONGO_EXPRESS_PORT:-8081}:${MONGO_EXPRESS_PORT:-8081}
+    environment:
+      - ME_CONFIG_MONGODB_SERVER=mongo
+      - ME_CONFIG_MONGODB_PORT=${MONGODB_PORT:-27017}
+      - ME_CONFIG_MONGODB_ENABLE_ADMIN=false
+      - ME_CONFIG_MONGODB_AUTH_DATABASE=chatgpt_telegram_bot
+      - ME_CONFIG_BASICAUTH_USERNAME=${MONGO_EXPRESS_USERNAME:-username}
+      - ME_CONFIG_BASICAUTH_PASSWORD=${MONGO_EXPRESS_PASSWORD:-password}
+    depends_on:
+      - mongo
\ No newline at end of file
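
Once the stack is up, it is worth checking that LocalAI has finished preloading the models and answers OpenAI-style requests before messaging the bot. A minimal sketch, assuming the API is published on `localhost:8080` as in the compose file above: `/readyz` is the same endpoint the healthcheck polls, and `gpt-3.5-turbo` / `whisper-1` are the names assigned in `PRELOAD_MODELS`.

```bash
# Wait until the healthcheck endpoint reports the API as ready
curl -f http://localhost:8080/readyz

# List the models LocalAI has registered (should include gpt-3.5-turbo and whisper-1)
curl http://localhost:8080/v1/models

# Send a test chat completion through the same OpenAI-compatible API the bot uses
curl http://localhost:8080/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Hello!"}]}'
```

If these calls succeed, the Telegram bot container reaches the same API over the compose network via `OPENAI_API_BASE=http://api:8080/v1`.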