Merge branch 'v1.0' into lifei/aarch64-linux
* v1.0:
  Update cli-release.yml
  feat: added groq provider (#494)
  fix: use rust tls (#500)
  fix: Ldelalande/fix scroll (#504)
  feat: MCP server sdk (simple version first) (#499)
  tiny change to use most recent in stack (#501)
  stop bubbles filling screen (#495)
  chore: V1.0 release automation (#493)
lifeizhou-ap committed Dec 20, 2024
2 parents 16a4a7e + de90c55 commit 742a1e1
Showing 42 changed files with 2,592 additions and 1,111 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -37,7 +37,7 @@ jobs:
- name: Install Libs
run: |
sudo apt update -y
sudo apt install -y libdbus-1-dev libssl-dev gnome-keyring libxcb1-dev
sudo apt install -y libdbus-1-dev gnome-keyring libxcb1-dev
- name: Start gnome-keyring
# run gnome-keyring with 'foobar' as password for the login keyring
82 changes: 58 additions & 24 deletions .github/workflows/cli-release.yml
@@ -3,65 +3,99 @@ on:
tags:
- "v1.*"
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true

name: Release CLI

jobs:
build:
name: Build ${{ matrix.os }}-${{ matrix.architecture }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
architecture: [x86_64, aarch64]
include:
# Linux x86_64 build on Ubuntu
- os: ubuntu-latest
architecture: x86_64
suffix: unknown-linux-gnu

# TODO: Linux aarch64 (ARM64) build on Ubuntu via cross-compilation - probably use `cross-rs`

# macOS x86_64 build
- os: macos-latest
architecture: x86_64
suffix: apple-darwin

# macOS ARM64 build
- os: macos-latest
architecture: aarch64
suffix: apple-darwin

steps:
- uses: actions/checkout@v4
- name: Set up Rust
# Step 1: Checkout the code
- name: Checkout code
uses: actions/checkout@v4

# Step 2: Set up Rust toolchain
# This will install the specified target as per matrix
- name: Set up Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
target: ${{ matrix.architecture }}-${{ matrix.suffix }}
- name: Install cross-compilation tools and dependencies

# Step 3: Install dependencies for macOS ARM64 builds
- name: Install dependencies for macOS ARM64
if: matrix.os == 'macos-latest' && matrix.architecture == 'aarch64'
run: |
brew install llvm
rustup target add aarch64-apple-darwin
echo "export PATH=$(brew --prefix llvm)/bin:\$PATH" >> $GITHUB_ENV
# Step 4: Install Libs for Ubuntu (x86_64 and aarch64)
- name: Install Libs for Ubuntu
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y pkg-config libssl-dev
if [ "${{ matrix.architecture }}" = "aarch64" ]; then
sudo dpkg --add-architecture arm64
sudo apt-get update
sudo apt-get install -y gcc-aarch64-linux-gnu
sudo apt-get install -y libssl-dev:arm64 libc6-dev-arm64-cross
fi
- id: build
shell: bash
sudo apt update -y
sudo apt install -y libdbus-1-dev gnome-keyring libxcb1-dev
gnome-keyring-daemon --components=secrets --daemonize --unlock <<< 'foobar'
# Step 5: Build the project
- name: Build
run: |
export TARGET=${{ matrix.architecture }}-${{ matrix.suffix }}
cargo build --release --target $TARGET
cd target/${TARGET}/release
tar -cjf goose-${TARGET}.tar.bz2 goose goosed
export TARGET_PATH="target/${TARGET}/release/goose-${TARGET}.tar.bz2"
echo "TARGET_PATH=${TARGET_PATH}" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@v4
echo "ARTIFACT=target/${TARGET}/release/goose-${TARGET}.tar.bz2" >> $GITHUB_ENV
# Step 6: Upload artifacts
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: goose-${{ matrix.architecture }}-${{ matrix.suffix }}
path: ${{ steps.build.outputs.TARGET_PATH }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
path: ${{ env.ARTIFACT }}

release:
name: Release
runs-on: ubuntu-latest
needs: [build]
permissions:
contents: write
steps:
- uses: actions/download-artifact@v4
# Step 1: Download all build artifacts
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
merge-multiple: true
- uses: ncipollo/release-action@v1

# Step 2: Create GitHub release with artifacts
- name: Create GitHub release
uses: ncipollo/release-action@v1
with:
artifacts: "goose-*.bz2"
artifacts: "goose-*.tar.bz2"
token: ${{ secrets.GITHUB_TOKEN }}
16 changes: 12 additions & 4 deletions crates/goose-cli/Cargo.toml
@@ -30,7 +30,18 @@ serde_yaml = "0.9"
dirs = "4.0"
strum = "0.26"
strum_macros = "0.26"
reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false }
reqwest = { version = "0.12.9", features = [
"rustls-tls",
"json",
"cookies",
"gzip",
"brotli",
"deflate",
"zstd",
"charset",
"http2",
"stream"
], default-features = false }
rand = "0.8.5"
async-trait = "0.1"
rustyline = "15.0.0"
@@ -41,6 +52,3 @@ rust_decimal_macros = "1.36.0"
tempfile = "3"
temp-env = { version = "0.3.6", features = ["async_closure"] }

#[target.aarch64-unknown-linux-gnu]
#linker = "aarch64-linux-gnu-ld"
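A side note on the reqwest change above: with default-features = false and the rustls-tls feature, reqwest 0.12 performs TLS through rustls rather than the system OpenSSL, which is what allows dropping libssl-dev from the ci.yaml install step earlier in this commit. Below is a minimal sketch of a client built this way; the URL, the tokio runtime attribute, and the explicit use_rustls_tls() call are illustrative assumptions, not code from this repository.

use reqwest::Client;

// Assumes tokio with the "macros" and runtime features is available in the
// consuming crate (goose already depends on tokio with "full").
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // With default-features = false plus "rustls-tls", rustls is the only TLS
    // backend compiled in; the explicit call just makes that choice visible.
    let client = Client::builder().use_rustls_tls().build()?;
    let body = client
        .get("https://example.com") // placeholder URL
        .send()
        .await?
        .text()
        .await?;
    println!("fetched {} bytes over rustls", body.len());
    Ok(())
}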

16 changes: 12 additions & 4 deletions crates/goose-cli/src/commands/configure.rs
@@ -5,8 +5,13 @@ use cliclack::spinner;
use console::style;
use goose::key_manager::{get_keyring_secret, save_to_keyring, KeyRetrievalStrategy};
use goose::message::Message;
use goose::providers::anthropic::ANTHROPIC_DEFAULT_MODEL;
use goose::providers::databricks::DATABRICKS_DEFAULT_MODEL;
use goose::providers::factory;
use goose::providers::google::GOOGLE_DEFAULT_MODEL;
use goose::providers::groq::GROQ_DEFAULT_MODEL;
use goose::providers::ollama::OLLAMA_MODEL;
use goose::providers::openai::OPEN_AI_DEFAULT_MODEL;
use std::error::Error;

pub async fn handle_configure(
@@ -48,6 +53,7 @@ pub async fn handle_configure(
("ollama", "Ollama", "Local open source models"),
("anthropic", "Anthropic", "Claude models"),
("google", "Google Gemini", "Gemini models"),
("groq", "Groq", "AI models"),
])
.interact()?
.to_string()
@@ -154,11 +160,12 @@ pub async fn handle_configure(

pub fn get_recommended_model(provider_name: &str) -> &str {
match provider_name {
"openai" => "gpt-4o",
"databricks" => "claude-3-5-sonnet-2",
"openai" => OPEN_AI_DEFAULT_MODEL,
"databricks" => DATABRICKS_DEFAULT_MODEL,
"ollama" => OLLAMA_MODEL,
"anthropic" => "claude-3-5-sonnet-2",
"google" => "gemini-1.5-flash",
"anthropic" => ANTHROPIC_DEFAULT_MODEL,
"google" => GOOGLE_DEFAULT_MODEL,
"groq" => GROQ_DEFAULT_MODEL,
_ => panic!("Invalid provider name"),
}
}
@@ -170,6 +177,7 @@ pub fn get_required_keys(provider_name: &str) -> Vec<&'static str> {
"ollama" => vec!["OLLAMA_HOST"],
"anthropic" => vec!["ANTHROPIC_API_KEY"], // Removed ANTHROPIC_HOST since we use a fixed endpoint
"google" => vec!["GOOGLE_API_KEY"],
"groq" => vec!["GROQ_API_KEY"],
_ => panic!("Invalid provider name"),
}
}
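For context on the two helpers above, here is a hedged sketch of a unit test that could sit in the same module to pin down the new "groq" arms. It is written against the names in this diff (get_recommended_model, get_required_keys, GROQ_DEFAULT_MODEL) and is not a test that exists in the repository.

#[cfg(test)]
mod groq_configure_sketch {
    use super::{get_recommended_model, get_required_keys};
    use goose::providers::groq::GROQ_DEFAULT_MODEL;

    #[test]
    fn groq_resolves_to_default_model_and_api_key() {
        // The new match arms route "groq" to the provider's default model
        // and require only GROQ_API_KEY.
        assert_eq!(get_recommended_model("groq"), GROQ_DEFAULT_MODEL);
        assert_eq!(get_required_keys("groq"), vec!["GROQ_API_KEY"]);
    }
}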
14 changes: 12 additions & 2 deletions crates/goose-cli/src/profile.rs
@@ -2,7 +2,7 @@ use anyhow::Result;
use goose::key_manager::{get_keyring_secret, KeyRetrievalStrategy};
use goose::providers::configs::{
AnthropicProviderConfig, DatabricksAuth, DatabricksProviderConfig, GoogleProviderConfig,
ModelConfig, OllamaProviderConfig, OpenAiProviderConfig, ProviderConfig,
GroqProviderConfig, ModelConfig, OllamaProviderConfig, OpenAiProviderConfig, ProviderConfig,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
@@ -130,7 +130,17 @@ pub fn get_provider_config(provider_name: &str, profile: Profile) -> ProviderCon
.expect("GOOGLE_API_KEY not available in env or the keychain\nSet an env var or rerun `goose configure`");

ProviderConfig::Google(GoogleProviderConfig {
host: "https://generativelanguage.googleapis.com".to_string(), // Default Anthropic API endpoint
host: "https://generativelanguage.googleapis.com".to_string(),
api_key,
model: model_config,
})
}
"groq" => {
let api_key = get_keyring_secret("GROQ_API_KEY", KeyRetrievalStrategy::Both)
.expect("GROQ_API_KEY not available in env or the keychain\nSet an env var or rerun `goose configure`");

ProviderConfig::Groq(GroqProviderConfig {
host: "https://api.groq.com".to_string(),
api_key,
model: model_config,
})
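The "groq" arm above is the keyring-backed path. As a rough sketch of the same construction done by hand: type and constant paths follow the use statements in this diff, while reading the key from an environment variable instead of the keyring is an assumption made for illustration.

use goose::providers::configs::{GroqProviderConfig, ModelConfig, ProviderConfig};
use goose::providers::groq::GROQ_DEFAULT_MODEL;

fn groq_provider_from_env() -> ProviderConfig {
    // Illustrative only: the CLI normally pulls GROQ_API_KEY via get_keyring_secret.
    let api_key = std::env::var("GROQ_API_KEY")
        .expect("GROQ_API_KEY not set; export it or run `goose configure`");
    ProviderConfig::Groq(GroqProviderConfig {
        host: "https://api.groq.com".to_string(),
        api_key,
        model: ModelConfig::new(GROQ_DEFAULT_MODEL.to_string()),
    })
}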
40 changes: 37 additions & 3 deletions crates/goose-server/src/configuration.rs
@@ -1,13 +1,14 @@
use crate::error::{to_env_var, ConfigError};
use config::{Config, Environment};
use goose::providers::configs::GoogleProviderConfig;
use goose::providers::configs::{GoogleProviderConfig, GroqProviderConfig};
use goose::providers::openai::OPEN_AI_DEFAULT_MODEL;
use goose::providers::{
configs::{
DatabricksAuth, DatabricksProviderConfig, ModelConfig, OllamaProviderConfig,
OpenAiProviderConfig, ProviderConfig,
},
factory::ProviderType,
google, ollama,
google, groq, ollama,
utils::ImageFormat,
};
use serde::Deserialize;
@@ -88,6 +89,17 @@ pub enum ProviderSettings {
#[serde(default)]
max_tokens: Option<i32>,
},
Groq {
#[serde(default = "default_groq_host")]
host: String,
api_key: String,
#[serde(default = "default_groq_model")]
model: String,
#[serde(default)]
temperature: Option<f32>,
#[serde(default)]
max_tokens: Option<i32>,
},
}

impl ProviderSettings {
@@ -99,6 +111,7 @@ impl ProviderSettings {
ProviderSettings::Databricks { .. } => ProviderType::Databricks,
ProviderSettings::Ollama { .. } => ProviderType::Ollama,
ProviderSettings::Google { .. } => ProviderType::Google,
ProviderSettings::Groq { .. } => ProviderType::Groq,
}
}

@@ -168,6 +181,19 @@ impl ProviderSettings {
.with_temperature(temperature)
.with_max_tokens(max_tokens),
}),
ProviderSettings::Groq {
host,
api_key,
model,
temperature,
max_tokens,
} => ProviderConfig::Groq(GroqProviderConfig {
host,
api_key,
model: ModelConfig::new(model)
.with_temperature(temperature)
.with_max_tokens(max_tokens),
}),
}
}
}
@@ -240,7 +266,7 @@ fn default_port() -> u16 {
}

fn default_model() -> String {
"gpt-4o".to_string()
OPEN_AI_DEFAULT_MODEL.to_string()
}

fn default_openai_host() -> String {
@@ -267,6 +293,14 @@ fn default_google_model() -> String {
google::GOOGLE_DEFAULT_MODEL.to_string()
}

fn default_groq_host() -> String {
groq::GROQ_API_HOST.to_string()
}

fn default_groq_model() -> String {
groq::GROQ_DEFAULT_MODEL.to_string()
}

fn default_image_format() -> ImageFormat {
ImageFormat::Anthropic
}
6 changes: 6 additions & 0 deletions crates/goose-server/src/state.rs
@@ -1,4 +1,5 @@
use anyhow::Result;
use goose::providers::configs::GroqProviderConfig;
use goose::{
agent::Agent,
developer::DeveloperSystem,
@@ -71,6 +72,11 @@ impl Clone for AppState {
model: config.model.clone(),
})
}
ProviderConfig::Groq(config) => ProviderConfig::Groq(GroqProviderConfig {
host: config.host.clone(),
api_key: config.api_key.clone(),
model: config.model.clone(),
}),
},
agent: self.agent.clone(),
secret_key: self.secret_key.clone(),
18 changes: 13 additions & 5 deletions crates/goose/Cargo.toml
@@ -9,7 +9,7 @@ description.workspace = true

[build-dependencies]
tokio = { version = "1.36", features = ["full"] }
reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false }
reqwest = { version = "0.12.9", features = ["json", "rustls-tls"], default-features = false }

[dependencies]
mcp-core = { path = "../mcp-core" }
@@ -18,7 +18,18 @@ anyhow = "1.0"
thiserror = "1.0"
futures = "0.3"
tokio-stream = "0.1"
reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false }
reqwest = { version = "0.12.9", features = [
"rustls-tls",
"json",
"cookies",
"gzip",
"brotli",
"deflate",
"zstd",
"charset",
"http2",
"stream"
], default-features = false }
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@@ -73,6 +84,3 @@ path = "examples/databricks_oauth.rs"
[[bench]]
name = "tokenization_benchmark"
harness = false

#[target.aarch64-unknown-linux-gnu]
#linker = "aarch64-linux-gnu-ld"
1 change: 1 addition & 0 deletions crates/goose/build.rs
@@ -8,6 +8,7 @@ const MODELS: &[&str] = &[
"Xenova/gemma-2-tokenizer",
"Xenova/gpt-4o",
"Qwen/Qwen2.5-Coder-32B-Instruct",
"Xenova/llama3-tokenizer",
];

#[tokio::main]
4 changes: 4 additions & 0 deletions crates/goose/src/providers.rs
@@ -7,8 +7,12 @@ pub mod model_pricing;
pub mod oauth;
pub mod ollama;
pub mod openai;
pub mod openai_utils;
pub mod utils;

pub mod google;
pub mod groq;
#[cfg(test)]
pub mod mock;
#[cfg(test)]
pub mod mock_server;