Merge pull request #76 from innightwolfsleep/ooba_extension_mode
Ooba extension mode
innightwolfsleep authored Sep 14, 2023
2 parents 2d677ac + a406335 commit fac358f
Showing 11 changed files with 123 additions and 92 deletions.
23 changes: 0 additions & 23 deletions .github/workflows/flake8.yml

This file was deleted.

43 changes: 22 additions & 21 deletions configs/telegram_config.json → configs/app_config.json
@@ -1,22 +1,23 @@
-{
-"bot_mode": "admin",
-"generator_script": "generator_llama_cpp",
-"model_path": "models\\llama-2-7b-chat.ggmlv3.q4_0.gguf",
-"characters_dir_path": "characters",
-"default_char": "Example.yaml",
-"presets_dir_path": "presets",
-"default_preset": "LLaMA-ggml-Precise.txt",
-"model_lang": "en",
-"user_lang": "en",
-"history_dir_path": "history",
-"token_file_path": "configs\\telegram_token.txt",
-"admins_file_path": "configs\\telegram_admins.txt",
-"users_file_path": "configs\\users_file_path.txt",
-"generator_params_file_path": "configs\\telegram_generator_params.json",
-"user_rules_file_path": "configs\\telegram_user_rules.json",
-"telegram_sd_config": "configs\\telegram_sd_config.json",
-"stopping_strings": ["<END>", "<START>", "end{code}"],
-"eos_token": "None",
-"translation_as_hidden_text": "on",
-"sd_api_url": "http://127.0.0.1:7860"
+{
+"bot_mode": "admin",
+"generator_script": "generator_llama_cpp",
+"model_path": "models\\puma-3b.ggmlv3.q4_0.gguf",
+"characters_dir_path": "characters",
+"default_char": "Example.yaml",
+"presets_dir_path": "presets",
+"default_preset": "LLaMA-ggml-Precise.txt",
+"model_lang": "en",
+"user_lang": "en",
+"history_dir_path": "history",
+"token_file_path": "configs\\telegram_token.txt",
+"admins_file_path": "configs\\telegram_admins.txt",
+"users_file_path": "configs\\users_file_path.txt",
+"generator_params_file_path": "configs\\telegram_generator_params.json",
+"user_rules_file_path": "configs\\telegram_user_rules.json",
+"telegram_sd_config": "configs\\telegram_sd_config.json",
+"stopping_strings": ["<END>", "<START>", "end{code}"],
+"eos_token": "None",
+"translation_as_hidden_text": "on",
+"sd_api_url": "http://127.0.0.1:7860",
+"proxy_url": ""
}
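For orientation, here is a minimal sketch (not code from this PR) of reading a config like this with per-key defaults, mirroring the `config.get(...)` pattern used by `load_config_file` in the main.py diff further down; the file path and variable names are illustrative only.

```python
import json

# Minimal sketch: read the bot config and fall back to defaults for missing keys,
# the same way load_config_file() in main.py does with config.get(...).
with open("configs/app_config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

proxy_url = config.get("proxy_url", "")  # new key in this PR; empty string means "no proxy"
sd_api_url = config.get("sd_api_url", "http://127.0.0.1:7860")
model_path = config.get("model_path", "")

print(proxy_url, sd_api_url, model_path)
```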
23 changes: 23 additions & 0 deletions configs/ext_config.json
@@ -0,0 +1,23 @@
{
"bot_mode": "admin",
"generator_script": "generator_text_generator_webui",
"model_path": "unused",
"characters_dir_path": "characters",
"default_char": "Example.yaml",
"presets_dir_path": "presets",
"default_preset": "LLaMA-ggml-Precise.txt",
"model_lang": "en",
"user_lang": "en",
"history_dir_path": "extensions\\telegram_bot\\history",
"token_file_path": "extensions\\telegram_bot\\configs\\telegram_token.txt",
"admins_file_path": "extensions\\telegram_bot\\configs\\telegram_admins.txt",
"users_file_path": "extensions\\telegram_bot\\configs\\users_file_path.txt",
"generator_params_file_path": "extensions\\telegram_bot\\configs\\telegram_generator_params.json",
"user_rules_file_path": "extensions\\telegram_bot\\configs\\telegram_user_rules.json",
"telegram_sd_config": "extensions\\telegram_bot\\configs\\telegram_sd_config.json",
"stopping_strings": ["<END>", "<START>", "end{code}"],
"eos_token": "None",
"translation_as_hidden_text": "on",
"sd_api_url": "http://127.0.0.1:7860",
"proxy_url": ""
}
17 changes: 11 additions & 6 deletions main.py
@@ -25,10 +25,10 @@
from telegram.ext import Updater

try:
-from extensions.telegram_bot.source.telegram_bot_user import TelegramBotUser as User
-import extensions.telegram_bot.source.telegram_bot_generator as generator_script
-from extensions.telegram_bot.source.telegram_bot_silero import Silero as Silero
-from extensions.telegram_bot.source.telegram_bot_sd_api import SdApi as SdApi
+from extensions.telegram_bot.source.user import TelegramBotUser as User
+import extensions.telegram_bot.source.generator as generator_script
+from extensions.telegram_bot.source.silero import Silero as Silero
+from extensions.telegram_bot.source.sd_api import SdApi as SdApi
except ImportError:
from source.user import TelegramBotUser as User
from source import generator as generator_script
@@ -137,7 +137,7 @@ class TelegramBotWrapper:

def __init__(
self,
config_file_path="configs/telegram_config.json",
config_file_path="configs/app_config.json",
):
"""Init telegram bot class. Use run_telegram_bot() to initiate bot.
@@ -155,6 +155,7 @@ def __init__(
self.user_rules_file_path = "telegram_user_rules.json"
self.sd_api_url = "http://127.0.0.1:7860"
self.sd_config_file_path = "telegram_sd_config.json"
+self.proxy_url = ""
# Set bot mode
self.bot_mode = "admin"
self.generator_script = "" # mode loaded from config
@@ -228,6 +229,7 @@ def load_config_file(self, config_file_path: str):
)
self.stopping_strings = config.get("stopping_strings", self.stopping_strings)
self.eos_token = config.get("eos_token", self.eos_token)
+self.proxy_url = config.get("proxy_url", self.proxy_url)
else:
logging.error("Cant find config_file " + config_file_path)

@@ -240,11 +242,14 @@ def run_telegram_bot(self, bot_token="", token_file_name=""):
:param token_file_name: (str) The name of the file containing the bot token. Default is `None`.
:return: None
"""
+request_kwargs = {
+    "proxy_url": self.proxy_url,
+}
if not bot_token:
token_file_name = token_file_name or self.token_file_path
with open(token_file_name, "r", encoding="utf-8") as f:
bot_token = f.read().strip()
-self.updater = Updater(token=bot_token, use_context=True)
+self.updater = Updater(token=bot_token, use_context=True, request_kwargs=request_kwargs)
self.updater.dispatcher.add_handler(CommandHandler("start", self.cb_start_command)),
self.updater.dispatcher.add_handler(MessageHandler(Filters.text, self.cb_get_message))
self.updater.dispatcher.add_handler(
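As a hedged illustration of what the new proxy wiring amounts to: in python-telegram-bot 13.x, `request_kwargs` is forwarded to the underlying HTTP request object, so a `proxy_url` entry routes all Bot API calls through that proxy. The token and proxy values below are placeholders (the proxy address is the example given in the project readme), not project defaults.

```python
from telegram.ext import Updater

# Placeholders -- in the bot these come from telegram_token.txt and app_config.json.
bot_token = "123456:ABC-DEF_your_token_here"
proxy_url = "https://127.0.0.1:10808"

# python-telegram-bot 13.x: request_kwargs is passed to the underlying Request object,
# so every Bot API call goes through the configured proxy.
request_kwargs = {"proxy_url": proxy_url} if proxy_url else {}

updater = Updater(token=bot_token, use_context=True, request_kwargs=request_kwargs)
updater.start_polling()
updater.idle()
```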
59 changes: 37 additions & 22 deletions readme.md
@@ -1,31 +1,38 @@
#Extension connecting llm_python to telegram bot api.
-
![Image1](https://github.com/innightwolfsleep/storage/raw/main/textgen_telegram.PNG)

Providing chat like telegram bot interface with [abetlen/llama-cpp-python](https://github.com/abetlen/llama-cpp-python), [langchain](https://pypi.org/project/langchain/) or transformers (tbc)
![Image1](https://github.com/innightwolfsleep/storage/raw/main/textgen_telegram.PNG)

REQUIREMENTS:
- python-telegram-bot==13.15
- pyyaml
- deep-translator==1.9.2
- llama-cpp-python
Providing a chat-like telegram bot interface for [abetlen/llama-cpp-python](https://github.com/abetlen/llama-cpp-python) or [huggingface/transformers](https://github.com/huggingface/transformers).
In addition, it can run as an extension for [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui).

HOW TO INSTALL:
1) clone this repo
```
git clone https://github.com/innightwolfsleep/llm_telegram_bot
```
2) install requirements.
```
pip install -r llm_telegram_bot\requirements.txt
```
---------------
HOW TO INSTALL (**standalone app**):
1) clone this repo
`git clone https://github.com/innightwolfsleep/llm_telegram_bot `
2) install requirements.
`pip install -r llm_telegram_bot\requirements_app.txt`

HOW TO USE:
HOW TO RUN (**standalone app**):
1) get bot token from https://t.me/BotFather
2) add bot token to environment (look `.env.example`) OR file `configs/telegram_token.txt`
3) move your model file to `models/`
4) set **model_path** to your model in `configs/telegram_config.json`
5) start `run.cmd` or `run.sh` or `python3 run.py`
4) set **model_path** to your model in `configs/app_config.json`
5) start `run.cmd` (Windows) or `run.sh` (Linux)
---------------
HOW TO INSTALL (**extension mode**):

1) obviously, install [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) first, add a model and set the options you need
2) run `cmd_windows.bat` or `cmd_linux.sh` to enable venv
3) clone this repo to "text-generation-webui\extensions"
`git clone https://github.com/innightwolfsleep/text-generation-webui-telegram_bot text-generation-webui\extensions\telegram_bot`
4) install requirements
`pip install -r text-generation-webui\extensions\telegram_bot\requirements_ext.txt`

HOW TO USE (**extension mode**):
1) get bot token from https://t.me/BotFather
2) add your bot token in `text-generation-webui\extensions\telegram_bot\configs\telegram_token.txt` file or oobabooga environment
3) run server.py with `--extensions telegram_bot`
4) (optional) if you are facing internet issues, set `proxy_url` in `ext_config.json` to your own proxy, for example `https://127.0.0.1:10808`
---------------

FEATURES:
- chat and notebook modes
@@ -51,8 +58,11 @@ FEATURES:

CONFIGURATION:

`app_config.json` - config for running as a standalone app (`run.sh` or `run.cmd`)
`ext_config.json` - config for running as an extension for [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui)

```
telegram_config.json
x_config.json
bot_mode=admin
specific bot mode. admin for personal use
- admin - the bot answers everyone in chat-like mode. All buttons, including settings-for-all, are available to everyone. (Default)
@@ -101,6 +111,11 @@ telegram_config.json
generating settings
translation_as_hidden_text=on
if "on" and model/user lang not the same - translation will be writed under spoiler. If "off" - translation without spoiler, no original text in message.
sd_api_url="http://127.0.0.1:7860"
stable diffusion API URL, required for the "photo" prefixes
proxy_url
proxy for Telegram API requests, to work around provider blocking
telegram_admins.txt
list of user ids that are forced into admin mode.
28 changes: 14 additions & 14 deletions requirements.txt → requirements_app.txt
@@ -1,14 +1,14 @@
-python-telegram-bot==13.15
-pyyaml>=6.0.0
-deep-translator>=1.9.2
-omegaconf==2.3.0
-llama-cpp-python>=0.1.83
-num2words>=0.5.12
-transformers>=4.33.0
-Pillow>=10.0.0
-torch>=2.0.1
-backoff>=2.2.1
-langchain>=0.0.286
-requests>=2.31.0
-urllib3>=2.0.4
-python-dotenv==1.0.0
+python-telegram-bot==13.15
+pyyaml>=6.0.0
+deep-translator>=1.9.2
+omegaconf==2.3.0
+llama-cpp-python>=0.1.83
+num2words>=0.5.12
+transformers>=4.33.0
+Pillow>=10.0.0
+torch>=2.0.1
+backoff>=2.2.1
+langchain>=0.0.286
+requests>=2.31.0
+urllib3>=2.0.4
+python-dotenv==1.0.0
11 changes: 11 additions & 0 deletions requirements_ext.txt
@@ -0,0 +1,11 @@
python-telegram-bot==13.15
pyyaml>=6.0.0
deep-translator>=1.9.2
omegaconf==2.3.0
num2words>=0.5.12
Pillow>=10.0.0
torch>=2.0.1
backoff>=2.2.1
requests>=2.31.0
urllib3>=2.0.4
python-dotenv==1.0.0
2 changes: 1 addition & 1 deletion run.py
@@ -4,7 +4,7 @@
from main import TelegramBotWrapper
from dotenv import load_dotenv

config_file_path = "configs/telegram_config.json"
config_file_path = "configs/app_config.json"


def run_server(token):
4 changes: 2 additions & 2 deletions script.py
@@ -1,11 +1,11 @@
import os
from threading import Thread
-from extensions.telegram_bot.telegram_bot_wrapper import TelegramBotWrapper
+from extensions.telegram_bot.main import TelegramBotWrapper
from dotenv import load_dotenv

# This module added to get compatibility with text-generation-webui-telegram_bot

config_file_path = "extensions/telegram_bot/configs/telegram_config.json"
config_file_path = "extensions/telegram_bot/configs/ext_config.json"


def run_server(token=""):
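The script.py hunk above only shows the imports and the new config path; the body of `run_server` is elided. As a rough sketch of how an extension-mode launcher can start the bot on a background thread (so text-generation-webui's own startup is not blocked) -- the `setup()` hook and the `BOT_TOKEN` variable name below are assumptions, not taken from this PR:

```python
import os
from threading import Thread

from dotenv import load_dotenv
from extensions.telegram_bot.main import TelegramBotWrapper

config_file_path = "extensions/telegram_bot/configs/ext_config.json"


def run_server(token=""):
    # Build the wrapper from the extension-mode config and start polling.
    tg_server = TelegramBotWrapper(config_file_path=config_file_path)
    tg_server.run_telegram_bot(bot_token=token)


def setup():
    # Hypothetical entry point called when the extension loads.
    load_dotenv()
    token = os.environ.get("BOT_TOKEN", "")  # hypothetical env-var name
    Thread(target=run_server, args=(token,), daemon=True).start()
```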
3 changes: 1 addition & 2 deletions source/generator.py
@@ -27,8 +27,7 @@ def init(script="GeneratorLlamaCpp", model_path="", n_ctx=4096, n_gpu_layers=0):
generator_class = getattr(importlib.import_module("source.generators." + script), "Generator")
except ImportError:
generator_class = getattr(
importlib.import_module("extensions.source.telegram_bot.generators." + script),
"Generator",
importlib.import_module("extensions.telegram_bot.source.generators." + script), "Generator"
)
global generator
generator = generator_class(model_path, n_ctx=n_ctx, n_gpu_layers=n_gpu_layers)
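For reference, a short sketch of how the corrected fallback import gets exercised: `init()` resolves a `Generator` class either from `source.generators` (standalone) or from `extensions.telegram_bot.source.generators` (extension mode) and instantiates it. The call below just mirrors the `init(script, model_path, n_ctx, n_gpu_layers)` signature shown in the hunk; the argument values are illustrative, taken from the old default config.

```python
from source import generator as generator_script

# Illustrative call; in the bot these values come from the config files
# rather than being hard-coded.
generator_script.init(
    script="generator_llama_cpp",
    model_path="models\\llama-2-7b-chat.ggmlv3.q4_0.gguf",
    n_ctx=4096,
    n_gpu_layers=0,
)
```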
2 changes: 1 addition & 1 deletion source/silero.py
@@ -4,7 +4,7 @@
from num2words import num2words

try:
-from extensions.telegram_bot.src.TelegramBotUser import TelegramBotUser as User
+from extensions.telegram_bot.source.user import TelegramBotUser as User
except ImportError:
from source.user import TelegramBotUser as User

