Skip to content

Commit

Permalink
Merge pull request #66 from Deeptechia/main
Browse files Browse the repository at this point in the history
Geppetto v0.2.4
  • Loading branch information
kelyacf authored Aug 8, 2024
2 parents 42912e0 + c50468f commit 6065a30
Show file tree
Hide file tree
Showing 14 changed files with 232 additions and 58 deletions.
17 changes: 10 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

## ⭐️ Key Features

- 🔀 **Multi-Model Support:** Toggle effortlessly between AI models like ChatGPT and Gemini to suit your specific requirements. ChatGPT model gpt4-turbo is set as the default model
- 🔀 **Multi-Model Support:** Toggle effortlessly between AI models like ChatGPT, Claude, and Gemini to suit your specific requirements. The ChatGPT model `gpt-4-turbo` is set as the default model.
- 💬 **Streamlined Communication:** Initiate dynamic conversation threads by directly messaging Geppetto.
- ➡️ **Advanced LLM Control:** Manage multiple AI models with the advanced LLM controller component.
- 🔧 **Effortless Setup:** Enjoy a smooth setup experience powered by Docker 🐳.
Expand All @@ -34,20 +34,21 @@

### 🔒 Allowed Users

- Access is granted only to users listed in the [allowed users configuration file](./config/allowed-slack-ids.json).
- Access is granted only to users listed in the [allowed users configuration file](/config/allowed-slack-ids.json).

## 🔀 Switching AI Models

- To switch between ChatGPT and Gemini, or other models, include the following commands in your message:
- To switch between ChatGPT, Gemini, and Claude, include one of the following commands in your message:
- `llm_openai` to use ChatGPT
- `llm_gemini` to use Gemini
- `llm_claude` to use Claude

## 🛠️ Setup and Configuration

### 🔧 Slack App Configuration

1. **Modify App**:
- **Edit `manifest-dev.yaml`**: Adjust fields under `display_information` and `bot_user` to tailor Geppetto for your needs.
- **Edit `config/manifest-dev.yaml`**: Adjust fields under `display_information` and `bot_user` to tailor Geppetto for your needs.
2. **Create App**:
- Go to the [Slack API](https://api.slack.com) and navigate to *Your Apps*.
- Click on *Create New App*.
Expand All @@ -71,16 +72,18 @@

4. **Environment Setup**

Copy `.configuration/.env.example` into a new `.configuration/.env`, and adjust the environment variables accordingly:
Copy `config/.env.example` into a new `config/.env`, and adjust the environment variables accordingly:

- `SLACK_BOT_TOKEN`: Your Slack bot token (This is the Bot User OAuth Token, it should start with 'xoxb').
- `SLACK_APP_TOKEN`: Your Slack application token (This is the App-Level Token, it should start with 'xapp').
- `OPENAI_API_KEY`: Your OpenAI API key.
- `SIGNING_SECRET`: Your Signing secret to verify Slack requests (from your Slack App Credentials).
- `DALLE_MODEL`: The OpenAI DALL-E-3 model.
- `CHATGPT_MODEL`: The OpenAI ChatGPT-4 model.
- `DALLE_MODEL`: The OpenAI DALL-E model.
- `CHATGPT_MODEL`: The OpenAI ChatGPT model.
- `GEMINI_MODEL`: The Gemini model.
- `GOOGLE_API_KEY`: The Google Gemini API key.
- `CLAUDE_MODEL`: The Claude model.
- `CLAUDE_API_KEY`: The Anthropic Claude API key.

## 🚀 Deployment

Expand Down
4 changes: 4 additions & 0 deletions config/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,7 @@ DALLE_MODEL = "dall-e-3"
SIGNING_SECRET = "YOUR_SECRET"
GOOGLE_API_KEY = "YOUR_TOKEN"
GEMINI_MODEL = "gemini-pro"
CLAUDE_API_KEY = "YOUR_TOKEN"
CLAUDE_MODEL = "claude-3-5-sonnet-20240620"

GEPPETTO_VERSION = "0.2.4"
85 changes: 85 additions & 0 deletions geppetto/claude_handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
import re
import os
import logging

from .llm_api_handler import LLMHandler
from anthropic import Anthropic
from dotenv import load_dotenv
from typing import List
from typing import Dict

load_dotenv(os.path.join("config", ".env"))

# Anthropic credentials and model selection, read from config/.env
# (see config/.env.example: CLAUDE_API_KEY, CLAUDE_MODEL).
ANTHROPIC_API_KEY = os.getenv("CLAUDE_API_KEY")
CLAUDE_MODEL = os.getenv("CLAUDE_MODEL")

# Bot version string, shown in the footer appended to every response.
VERSION = os.getenv("GEPPETTO_VERSION")

def convert_claude_to_slack(text):
    """
    Convert Claude's markdown output into Slack "mrkdwn" formatting.

    Transformations applied, in order:
      - "* " bullets become "- ", then every "- " becomes a "• " bullet;
      - "**bold**" becomes Slack's "*bold*";
      - "__italic__" becomes Slack's "_italic_";
      - markdown links "[label](url)" become Slack links "<url|label>";
      - a Geppetto version / Claude model footer is appended.

    Args:
        text (str): The Claude markdown text to be converted.
    Returns:
        str: The markdown text formatted for Slack.
    Raises:
        ValueError: If ``text`` is not a string.
    """
    if not isinstance(text, str):
        raise ValueError("Input must be a string.")

    # NOTE(review): plain replace() also rewrites "* " and "- " occurring
    # mid-sentence, not only list bullets — same trade-off as the Gemini
    # and OpenAI handlers in this repo.
    formatted_text = text.replace("* ", "- ")
    formatted_text = formatted_text.replace("**", "*")
    formatted_text = formatted_text.replace("__", "_")
    formatted_text = formatted_text.replace("- ", "• ")
    formatted_text = re.sub(r"\[(.*?)\]\((.*?)\)", r"<\2|\1>", formatted_text)

    # Footer identifying the bot version and the Claude model that answered.
    formatted_text += f"\n\n_(Geppetto v{VERSION} Source: Claude Model {CLAUDE_MODEL})_"

    return formatted_text


class ClaudeHandler(LLMHandler):
    """LLM handler that routes Geppetto prompts to Anthropic's Claude API."""

    def __init__(
        self,
        personality,
    ):
        """Initialize the handler with an Anthropic client.

        Args:
            personality: Personality description injected by the controller
                (stored for parity with the other handlers).
        """
        super().__init__(
            'Claude',
            CLAUDE_MODEL,
            Anthropic(api_key=ANTHROPIC_API_KEY)
        )
        self.claude_model = CLAUDE_MODEL
        self.personality = personality
        self.system_role = "system"
        self.assistant_role = "assistant"
        self.user_role = "user"
        # Upper bound on tokens generated per response.
        self.MAX_TOKENS = 1024

    def llm_generate_content(self, user_prompt: List[Dict], status_callback=None, *status_callback_args):
        """Send the conversation to Claude and return a Slack-formatted reply.

        Args:
            user_prompt: Conversation as a list of ``{"role", "content"}`` dicts.
            status_callback: Accepted for interface parity with the other
                handlers; not used by this implementation.
        Returns:
            str: Slack-formatted response text, or a fallback apology string
            if the API call fails for any reason.
        """
        logging.info("Sending msg to claude: %s" % user_prompt)

        geppetto = {"role": "assistant",
                    "content": " This is for your information only. Do not write this in your answer. Your name is Geppetto, a bot developed by DeepTechia. Answer only in the language the user spoke or asked you to do."}

        try:
            # Build a new message list instead of appending to the caller's
            # list: mutating ``user_prompt`` would accumulate one extra
            # instruction message per call on repeated invocations/retries.
            messages = user_prompt + [geppetto]
            response = self.client.messages.create(
                model=self.model,
                max_tokens=self.MAX_TOKENS,
                messages=messages,
            )

            markdown_response = convert_claude_to_slack(str(response.content[0].text))
            return markdown_response

        except Exception as e:
            # Deliberate broad catch: any API/formatting failure degrades to a
            # user-facing fallback so the Slack thread keeps working.
            logging.error(f"Error generating content: {e}")
            return "I'm sorry, I couldn't generate a response at this time. Try using another AI model."



5 changes: 4 additions & 1 deletion geppetto/gemini_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@

GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
GEMINI_MODEL=os.getenv("GEMINI_MODEL", "gemini-pro")

VERSION = os.getenv("GEPPETTO_VERSION")

MSG_FIELD = "parts"
MSG_INPUT_FIELD = "content"

Expand Down Expand Up @@ -40,7 +43,7 @@ def convert_gemini_to_slack(text):
formatted_text = formatted_text.replace("- ", "• ")
formatted_text = re.sub(r"\[(.*?)\]\((.*?)\)", r"<\2|\1>", formatted_text)

formatted_text += f"\n\n_(Geppetto v0.2.3 Source: Gemini Model {GEMINI_MODEL})_"
formatted_text += f"\n\n_(Geppetto v{VERSION} Source: Gemini Model {GEMINI_MODEL})_"

return formatted_text

Expand Down
13 changes: 10 additions & 3 deletions geppetto/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,16 @@
from .slack_handler import SlackHandler
from .openai_handler import OpenAIHandler
from .gemini_handler import GeminiHandler
from .claude_handler import ClaudeHandler
from slack_bolt.adapter.socket_mode import SocketModeHandler
from .utils import load_json

load_dotenv(os.path.join("config", ".env"))


SLACK_BOT_TOKEN = os.getenv("SLACK_BOT_TOKEN")
SLACK_APP_TOKEN = os.getenv("SLACK_APP_TOKEN")
SIGNING_SECRET = os.getenv("SIGNING_SECRET")
# Slack credentials, read from config/.env (see config/.env.example and the
# README, which document SLACK_BOT_TOKEN / SLACK_APP_TOKEN / SIGNING_SECRET).
# NOTE(review): restored from "*_TEST"-suffixed names, which are not defined
# in config/.env.example and appear to be leftover local-testing overrides.
SLACK_BOT_TOKEN = os.getenv("SLACK_BOT_TOKEN")
SLACK_APP_TOKEN = os.getenv("SLACK_APP_TOKEN")
SIGNING_SECRET = os.getenv("SIGNING_SECRET")

DEFAULT_RESPONSES = load_json("default_responses.json")

Expand All @@ -39,6 +40,12 @@ def initialized_llm_controller():
"handler_args": {
"personality": DEFAULT_RESPONSES["features"]["personality"]
}
},
{ "name": "Claude",
"handler": ClaudeHandler,
"handler_args": {
"personality": DEFAULT_RESPONSES["features"]["personality"]
}
}
]
)
Expand Down
4 changes: 3 additions & 1 deletion geppetto/openai_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
DALLE_MODEL = os.getenv("DALLE_MODEL")
CHATGPT_MODEL = os.getenv("CHATGPT_MODEL")

VERSION = os.getenv("GEPPETTO_VERSION")

OPENAI_IMG_FUNCTION = "generate_image"
ROLE_FIELD = "role"

Expand All @@ -44,7 +46,7 @@ def convert_openai_markdown_to_slack(text):
formatted_text = formatted_text.replace("__", "_")
formatted_text = formatted_text.replace("- ", "• ")
formatted_text = re.sub(r"\[(.*?)\]\((.*?)\)", r"<\2|\1>", formatted_text)
formatted_text += f"\n\n_(Geppetto v0.2.3 Source: OpenAI Model {CHATGPT_MODEL})_"
formatted_text += f"\n\n_(Geppetto v{VERSION} Source: OpenAI Model {CHATGPT_MODEL})_"

# Code blocks and italics remain unchanged but can be explicitly formatted if necessary
return formatted_text
Expand Down
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ authors = [
"David Weil <[email protected]>",
"Diego Kelyacoubian <[email protected]>",
"Sebastian Wain <[email protected]>",
"Carlos Sims <[email protected]>"
"Carlos Sims <[email protected]>",
"Camila Gallo Garcia <[email protected]>"
]
description = "Geppetto is a sophisticated Slack bot that facilitates seamless interaction with multiple AI models, including OpenAI's ChatGPT-4 and DALL-E-3, Anthropic's Claude, and Google's Gemini model."
readme = "README.md"
Expand All @@ -37,6 +38,7 @@ Pillow = "^10.1.0"
google-generativeai = "^0.7.1"
IPython = "^8.0.0"
unittest-xml-reporting = "^3.2.0"
anthropic = "^0.32.0"

[tool.poetry.scripts]
geppetto = "geppetto.main:main"
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ unittest-xml-reporting>=3.2.0
pytest>=8.2.0
pytest-cov>=5.0.0
flake8>=7.0.0
anthropic>=0.32.0
17 changes: 17 additions & 0 deletions tests/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import logging
import unittest


class TestBase(unittest.TestCase):
    """Shared base class for Geppetto test cases.

    Silences log output below CRITICAL so test runs stay quiet.
    """

    def setUp(self):
        """Raise the root logger threshold before each test."""
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.CRITICAL)


def OF(**kw):
    """Build an anonymous object fixture whose attributes mirror ``kw``.

    Handy for faking API response objects in tests, e.g.
    ``OF(text="hi")`` yields an object with a ``.text`` attribute.
    """
    class OF:
        pass

    fixture = OF()
    vars(fixture).update(kw)
    return fixture
59 changes: 59 additions & 0 deletions tests/test_claude.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import os
import sys
import unittest
from unittest.mock import Mock, patch

from tests import TestBase

script_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(script_dir)
sys.path.append(parent_dir)

from geppetto.claude_handler import ClaudeHandler
import logging
TEST_PERSONALITY = "Your AI assistant"


class TestClaude(TestBase):
    """Unit tests for ClaudeHandler with the Anthropic client patched out."""

    @classmethod
    def setUpClass(cls):
        # Patch the Anthropic client class so no real API key or network
        # access is needed to construct the handler.
        cls.patcher = patch("geppetto.claude_handler.Anthropic")
        cls.mock_claude = cls.patcher.start()
        cls.claude_handler = ClaudeHandler(personality=TEST_PERSONALITY)
        logging.getLogger().setLevel(logging.CRITICAL)

    @classmethod
    def tearDownClass(cls):
        cls.patcher.stop()

    def test_personality(self):
        self.assertEqual(self.claude_handler.personality, TEST_PERSONALITY)

    def test_llm_generate_content(self):
        user_prompt = [{"role": "user", "content": "Hello, Claude!"}]

        mock_response = Mock()
        mock_response.content = [Mock(text="Mocked Claude response")]
        self.claude_handler.client.messages.create = Mock(return_value=mock_response)

        # Strip the "_(Geppetto vX ...)_" footer appended by the Slack formatter.
        response = self.claude_handler.llm_generate_content(user_prompt).split('\n\n_(Geppetto', 1)[0].strip()

        self.assertEqual(response, "Mocked Claude response")

    def test_failed_to_llm_generate_content(self):
        # Make the API call itself raise so the handler's fallback branch is
        # exercised explicitly. (The previous version passed a str prompt and
        # relied on an incidental AttributeError; its mocked ``content`` was
        # never used.)
        failed_response = "I'm sorry, I couldn't generate a response at this time. Try using another AI model."

        self.claude_handler.client.messages.create = Mock(side_effect=Exception("API failure"))

        response = self.claude_handler.llm_generate_content(
            [{"role": "user", "content": "Hello, Claude!"}]
        )

        self.assertEqual(response, failed_response)


if __name__ == "__main__":
    unittest.main()
15 changes: 8 additions & 7 deletions tests/test_controller.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import unittest
from geppetto.llm_api_handler import LLMHandler
from geppetto.llm_controller import LLMController
from tests import TestBase

ClientMock = {}

Expand Down Expand Up @@ -69,16 +70,16 @@ def get_prompt_from_thread(self, **args):
]


class TestController(unittest.TestCase):
@classmethod
def setUp(cls):
cls.llm_controller = LLMController(
class TestController(TestBase):
def setUp(self):
super(TestBase, self).setUp()
self.llm_controller = LLMController(
sample_llms_cfg
)

@classmethod
def tearDown(cls):
cls.llm_controller = None
def tearDown(self):
super(TestBase, self).tearDown()
self.llm_controller = None

def test_controller_set_up(self):
self.assertEqual(len(self.llm_controller.llm_cfgs), 2)
Expand Down
11 changes: 3 additions & 8 deletions tests/test_gemini.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,17 @@
import unittest
from unittest.mock import Mock, patch

from tests import TestBase

script_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(script_dir)
sys.path.append(parent_dir)

from geppetto.exceptions import InvalidThreadFormatError
from geppetto.gemini_handler import GeminiHandler

def OF(**kw):
class OF:
pass
instance = OF()
for k, v in kw.items():
setattr(instance, k, v)
return instance

class TestGemini(unittest.TestCase):
class TestGemini(TestBase):
@classmethod
def setUpClass(cls):
cls.patcher = patch("geppetto.gemini_handler.genai")
Expand Down
Loading

0 comments on commit 6065a30

Please sign in to comment.