forked from tailcallhq/tailcall
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
make model enum and rename to secret
Signed-off-by: Sahil Yeole <[email protected]>
- Loading branch information
1 parent
a6cac2d
commit 6ec6291
Showing
5 changed files
with
167 additions
and
18 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,145 @@ | ||
use std::collections::HashMap; | ||
|
||
use serde::de::{self, Deserializer}; | ||
use serde::ser::Serializer; | ||
use serde::{Deserialize, Serialize}; | ||
|
||
/// Supported LLM models across all providers, identified by their
/// canonical wire-format names (see `Model::as_str` / `Model::from_str`).
///
/// `Eq` and `Hash` are derived (safe for a fieldless `Copy + PartialEq`
/// enum) so models can be used as map/set keys.
///
/// NOTE(review): variant casing is inconsistent (`GPT4` vs `Gpt4Turbo`,
/// `LLAMA708192` vs `Llama8bInstant`). Renaming would break callers, so
/// the existing names are kept.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum Model {
    // OpenAI models
    #[default]
    Gpt3_5Turbo,
    GPT4,
    Gpt4Turbo,
    Gpt4oMini,
    GPT4O,

    // Ollama models
    Gemma2B,
    Gemma2,
    Llama3_1,

    // Anthropic models
    Claude3Haiku20240307,
    Claude3Sonnet20240229,
    Claude3Opus20240229,
    Claude35Sonnet20240620,

    // Cohere models
    CommandLightNightly,
    CommandLight,
    CommandNightly,
    Command,
    CommandR,
    CommandRPlus,

    // Gemini models
    Gemini15FlashLatest,
    Gemini10Pro,
    Gemini15Flash,
    Gemini15Pro,

    // Groq models
    LLAMA708192,
    LLAMA38192,
    LlamaGroq8b8192ToolUsePreview,
    LlamaGroq70b8192ToolUsePreview,
    Gemma29bIt,
    Gemma7bIt,
    Mixtral8x7b32768,
    Llama8bInstant,
    Llama70bVersatile,
    Llama405bReasoning,
}
|
||
impl Model { | ||
fn model_hashmap() -> &'static HashMap<&'static str, Model> { | ||
static MAP: once_cell::sync::Lazy<HashMap<&'static str, Model>> = | ||
once_cell::sync::Lazy::new(|| { | ||
let mut map = HashMap::new(); | ||
|
||
// OpenAI Models | ||
map.insert("gp-3.5-turbo", Model::Gpt3_5Turbo); | ||
map.insert("gpt-4", Model::GPT4); | ||
map.insert("gpt-4-turbo", Model::Gpt4Turbo); | ||
map.insert("gpt-4o-mini", Model::Gpt4oMini); | ||
map.insert("gpt-4o", Model::GPT4O); | ||
|
||
// Ollama Models | ||
map.insert("gemma:2b", Model::Gemma2B); | ||
map.insert("gemma2", Model::Gemma2); | ||
map.insert("llama3.1", Model::Llama3_1); | ||
|
||
// Anthropic Models | ||
map.insert("claude-3-haiku-20240307", Model::Claude3Haiku20240307); | ||
map.insert("claude-3-sonnet-20240229", Model::Claude3Sonnet20240229); | ||
map.insert("claude-3-opus-20240229", Model::Claude3Opus20240229); | ||
map.insert("claude-3-5-sonnet-20240620", Model::Claude35Sonnet20240620); | ||
|
||
// Cohere Models | ||
map.insert("command-light-nightly", Model::CommandLightNightly); | ||
map.insert("command-light", Model::CommandLight); | ||
map.insert("command-nightly", Model::CommandNightly); | ||
map.insert("command", Model::Command); | ||
map.insert("command-r", Model::CommandR); | ||
map.insert("command-r-plus", Model::CommandRPlus); | ||
|
||
// Gemini Models | ||
map.insert("gemini-1.5-flash-latest", Model::Gemini15FlashLatest); | ||
map.insert("gemini-1.0-pro", Model::Gemini10Pro); | ||
map.insert("gemini-1.5-flash", Model::Gemini15Flash); | ||
map.insert("gemini-1.5-pro", Model::Gemini15Pro); | ||
|
||
// Groq Models | ||
map.insert("llama3-70b-8192", Model::LLAMA708192); | ||
map.insert("llama3-8b-8192", Model::LLAMA38192); | ||
map.insert( | ||
"llama3-groq-8b-8192-tool-use-preview", | ||
Model::LlamaGroq8b8192ToolUsePreview, | ||
); | ||
map.insert( | ||
"llama3-groq-70b-8192-tool-use-preview", | ||
Model::LlamaGroq70b8192ToolUsePreview, | ||
); | ||
map.insert("gemma2-9b-it", Model::Gemma29bIt); | ||
map.insert("gemma-7b-it", Model::Gemma7bIt); | ||
map.insert("mixtral-8x7b-32768", Model::Mixtral8x7b32768); | ||
map.insert("llama-3.1-8b-instant", Model::Llama8bInstant); | ||
map.insert("llama-3.1-70b-versatile", Model::Llama70bVersatile); | ||
map.insert("llama-3.1-405b-reasoning", Model::Llama405bReasoning); | ||
|
||
map | ||
}); | ||
&MAP | ||
} | ||
|
||
pub fn from_str(model_name: &str) -> Option<Self> { | ||
Self::model_hashmap().get(model_name).copied() | ||
} | ||
|
||
pub fn as_str(&self) -> &'static str { | ||
Self::model_hashmap() | ||
.iter() | ||
.find_map(|(&k, &v)| if v == *self { Some(k) } else { None }) | ||
.unwrap_or_default() | ||
} | ||
} | ||
|
||
impl Serialize for Model { | ||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> | ||
where | ||
S: Serializer, | ||
{ | ||
serializer.serialize_str(self.as_str()) | ||
} | ||
} | ||
|
||
impl<'de> Deserialize<'de> for Model { | ||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> | ||
where | ||
D: Deserializer<'de>, | ||
{ | ||
let s = String::deserialize(deserializer)?; | ||
Model::from_str(&s).ok_or_else(|| de::Error::unknown_variant(&s, &[])) | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters