Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ai: add some new env variables to control OpenAI requests #231

Merged
merged 2 commits into from
Aug 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion client/ai/ai.go
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ func getOsName() string {
}

func GetAiSuggestionsViaHishtoryApi(ctx context.Context, shellName, query string, numberCompletions int) ([]string, error) {
hctx.GetLogger().Infof("Running OpenAI query for %#v", query)
hctx.GetLogger().Infof("Running OpenAI query for %#v via hishtory server", query)
req := ai.AiSuggestionRequest{
DeviceId: hctx.GetConf(ctx).DeviceId,
UserId: data.UserId(hctx.GetConf(ctx).UserSecret),
Expand Down
65 changes: 45 additions & 20 deletions shared/ai/ai.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"io"
"net/http"
"os"
"strconv"

"github.com/ddworken/hishtory/client/hctx"
"golang.org/x/exp/slices"
Expand Down Expand Up @@ -58,32 +59,15 @@ func GetAiSuggestionsViaOpenAiApi(apiEndpoint, query, shellName, osName string,
return results, OpenAiUsage{}, nil
}
hctx.GetLogger().Infof("Running OpenAI query for %#v", query)
if osName == "" {
osName = "Linux"
}
if shellName == "" {
shellName = "bash"
}
apiKey := os.Getenv("OPENAI_API_KEY")
if apiKey == "" && apiEndpoint == DefaultOpenAiEndpoint {
return nil, OpenAiUsage{}, fmt.Errorf("OPENAI_API_KEY environment variable is not set")
}
apiReq := openAiRequest{
Model: "gpt-3.5-turbo",
NumberCompletions: numberCompletions,
Messages: []openAiMessage{
{Role: "system", Content: "You are an expert programmer that loves to help people with writing shell commands. " +
"You always reply with just a shell command and no additional context, information, or formatting. " +
"Your replies will be directly executed in " + shellName + " on " + osName +
", so ensure that they are correct and do not contain anything other than a shell command."},
{Role: "user", Content: query},
},
}
apiReqStr, err := json.Marshal(apiReq)
apiReqStr, err := json.Marshal(createOpenAiRequest(query, shellName, osName, numberCompletions))
if err != nil {
return nil, OpenAiUsage{}, fmt.Errorf("failed to serialize JSON for OpenAI API: %w", err)
}
req, err := http.NewRequest("POST", apiEndpoint, bytes.NewBuffer(apiReqStr))
req, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewBuffer(apiReqStr))
if err != nil {
return nil, OpenAiUsage{}, fmt.Errorf("failed to create OpenAI API request: %w", err)
}
Expand All @@ -106,7 +90,7 @@ func GetAiSuggestionsViaOpenAiApi(apiEndpoint, query, shellName, osName string,
var apiResp openAiResponse
err = json.Unmarshal(bodyText, &apiResp)
if err != nil {
return nil, OpenAiUsage{}, fmt.Errorf("failed to parse OpenAI API response=%#v: %w", bodyText, err)
return nil, OpenAiUsage{}, fmt.Errorf("failed to parse OpenAI API response=%#v: %w", string(bodyText), err)
}
if len(apiResp.Choices) == 0 {
return nil, OpenAiUsage{}, fmt.Errorf("OpenAI API returned zero choices, parsed resp=%#v, resp body=%#v, resp.StatusCode=%d", apiResp, bodyText, resp.StatusCode)
Expand All @@ -133,3 +117,44 @@ type AiSuggestionRequest struct {
// AiSuggestionResponse is the JSON response body for an AI suggestion request,
// containing the list of suggested shell commands.
type AiSuggestionResponse struct {
Suggestions []string `json:"suggestions"`
}

// createOpenAiRequest builds the chat-completion request body sent to the
// OpenAI API. Empty osName/shellName arguments fall back to "Linux"/"bash".
//
// The following environment variables customize the request:
//   - OPENAI_API_MODEL: overrides the default model ("gpt-4o-mini")
//   - OPENAI_API_NUMBER_COMPLETIONS: overrides numberCompletions (must parse
//     as a positive integer; otherwise it is ignored)
//   - OPENAI_API_SYSTEM_PROMPT: replaces the built-in system prompt
func createOpenAiRequest(query, shellName, osName string, numberCompletions int) openAiRequest {
	if osName == "" {
		osName = "Linux"
	}
	if shellName == "" {
		shellName = "bash"
	}

	model := os.Getenv("OPENAI_API_MODEL")
	if model == "" {
		// According to https://platform.openai.com/docs/models gpt-4o-mini is the best model
		// by performance/price ratio.
		model = "gpt-4o-mini"
	}

	// Best-effort override: silently ignore values that are not positive
	// integers so a malformed env var cannot yield an invalid (zero or
	// negative) completion count in the request.
	if envNumberCompletions := os.Getenv("OPENAI_API_NUMBER_COMPLETIONS"); envNumberCompletions != "" {
		if n, err := strconv.Atoi(envNumberCompletions); err == nil && n > 0 {
			numberCompletions = n
		}
	}

	// Built-in prompt, used unless OPENAI_API_SYSTEM_PROMPT overrides it below.
	systemPrompt := "You are an expert programmer that loves to help people with writing shell commands. " +
		"You always reply with just a shell command and no additional context, information, or formatting. " +
		"Your replies will be directly executed in " + shellName + " on " + osName +
		", so ensure that they are correct and do not contain anything other than a shell command."
	if envSystemPrompt := os.Getenv("OPENAI_API_SYSTEM_PROMPT"); envSystemPrompt != "" {
		systemPrompt = envSystemPrompt
	}

	return openAiRequest{
		Model:             model,
		NumberCompletions: numberCompletions,
		Messages: []openAiMessage{
			{Role: "system", Content: systemPrompt},
			{Role: "user", Content: query},
		},
	}
}
Loading