Skip to content

Commit

Permalink
fix: Throw a warning instead of an error in case a non-recommended mod…
Browse files Browse the repository at this point in the history
…el is selected for a ControlMode.

Task: IL-546
  • Loading branch information
FlorianSchepersAA committed Jun 4, 2024
1 parent 3d9f453 commit 2b87ede
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 10 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
### New Features
...
### Fixes
...
- ControlModels throw a warning instead of an error in case a non-recommended model is selected.
### Deprecations
...

Expand Down
22 changes: 13 additions & 9 deletions src/intelligence_layer/core/model.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import warnings
from abc import ABC, abstractmethod
from functools import lru_cache
from typing import Literal, Optional
Expand Down Expand Up @@ -195,13 +196,16 @@ def tokenize(self, text: str) -> Encoding:


class ControlModel(ABC, AlephAlphaModel):
AllowedModel: Literal[""]
RecommendedModel: Literal[""]

def __init__(
    self, name: str, client: AlephAlphaClientProtocol | None = None
) -> None:
    """Create the model, warning (not erroring) on non-recommended names.

    Args:
        name: Name of the Aleph Alpha model to use. Names outside the
            subclass's ``RecommendedModel`` literal are still accepted,
            but a ``UserWarning`` is emitted because the class's prompt
            template may not fit the model.
        client: Client used to talk to the inference API; the superclass
            creates a default client when ``None``.
    """
    # "" is the base-class placeholder literal, so reject it explicitly.
    if name not in self.RecommendedModel.__args__ or name == "":  # type: ignore
        warnings.warn(
            "The provided model is not a recommended model for this model class. "
            "Make sure that the model you have selected is suited to be used "
            "for the prompt template used in this model class."
        )
    super().__init__(name, client)

@abstractmethod
Expand Down Expand Up @@ -232,7 +236,7 @@ class LuminousControlModel(ControlModel):
### Response:{{response_prefix}}"""
)

AllowedModel = Literal[
RecommendedModel = Literal[
"luminous-base-control-20230501",
"luminous-extended-control-20230501",
"luminous-supreme-control-20230501",
Expand All @@ -246,7 +250,7 @@ class LuminousControlModel(ControlModel):

def __init__(
    self,
    name: str = "luminous-base-control",
    client: Optional[AlephAlphaClientProtocol] = None,
) -> None:
    """Initialize a Luminous control model.

    Args:
        name: Model name; non-recommended names trigger a warning in the
            ``ControlModel`` base class instead of an error.
        client: Optional API client; defaults are handled by the superclass.
    """
    super().__init__(name, client)
Expand Down Expand Up @@ -282,15 +286,15 @@ class Llama2InstructModel(ControlModel):
{{response_prefix}}{% endif %}""")

# Models known to work well with this class's Llama-2 chat prompt template;
# other names are accepted but produce a warning (see ControlModel.__init__).
RecommendedModel = Literal[
    "llama-2-7b-chat",
    "llama-2-13b-chat",
    "llama-2-70b-chat",
]

def __init__(
    self,
    name: str = "llama-2-13b-chat",
    client: Optional[AlephAlphaClientProtocol] = None,
) -> None:
    """Initialize a Llama-2 instruct model.

    Args:
        name: Model name; non-recommended names trigger a warning in the
            ``ControlModel`` base class instead of an error.
        client: Optional API client; defaults are handled by the superclass.
    """
    super().__init__(name, client)
Expand Down Expand Up @@ -327,14 +331,14 @@ class Llama3InstructModel(ControlModel):
)
EOT_TOKEN = "<|eot_id|>"

# Models known to work well with this class's Llama-3 instruct prompt template;
# other names are accepted but produce a warning (see ControlModel.__init__).
RecommendedModel = Literal[
    "llama-3-8b-instruct",
    "llama-3-70b-instruct",
]

def __init__(
    self,
    name: str = "llama-3-8b-instruct",
    client: Optional[AlephAlphaClientProtocol] = None,
) -> None:
    """Initialize a Llama-3 instruct model.

    Args:
        name: Model name; non-recommended names trigger a warning in the
            ``ControlModel`` base class instead of an error.
        client: Optional API client; defaults are handled by the superclass.
    """
    super().__init__(name, client)
Expand Down

0 comments on commit 2b87ede

Please sign in to comment.