Skip to content

Commit

Permalink
feat: ET-1441: add support for configurable maximum input and output …
Browse files Browse the repository at this point in the history
…token limits (#6)

* feat: ET-1441: add support for configurable maximum input and output tokens in OpenAI API calls and update config keys and validation accordingly

* feat: ET-1441: update OCO_TOKENS_MAX_INPUT and OCO_TOKENS_MAX_OUTPUT default values in README.md

* feat: ET-1441: update generateCommitMessageFromGitDiff.ts to use MAX_TOKENS_OUTPUT from config for calculating MAX_REQUEST_TOKENS value

* feat: ET-1441: updated readme with correct config wording

* fix: ET-1441: fix typo in variable name, change OCO_TOKENS_MAX_INPUT to OCO_TOKENS_MAX_OUTPUT

* feat: ET-1441: add error handling for outputTokensTooHigh when maxChangeLength is exceeded and improve code formatting in generateCommitMessageFromGitDiff.ts file

* fix: ET-1441: remove unnecessary space in the code to improve code readability and consistency

* refactor: ET-1441: update README and error handling for token limits in generateCommitMessageFromGitDiff

* refactor: ET-1441: refactor token limit constants and improve error handling in generateCommitMessageFromGitDiff module

---------

Co-authored-by: Matthew Salter <[email protected]>
  • Loading branch information
mattsalt123 and Matthew Salter authored Nov 28, 2023
1 parent c3fb67b commit ff2ac7e
Show file tree
Hide file tree
Showing 4 changed files with 51 additions and 19 deletions.
6 changes: 4 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,8 @@ OCO_EMOJI=<boolean, add GitMoji>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
OCO_OPENAI_MAX_TOKENS=<max response tokens (default: 500)>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_ISSUE_ENABLED=<boolean, issue ID included within commit message - default to true if issue prefix has been set>
OCO_ISSUE_PREFIX=<optional prefix for issue ID, eg. 'ABC-'>
```
Expand Down Expand Up @@ -372,7 +373,8 @@ jobs:
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
# customization
OCO_OPENAI_MAX_TOKENS: 500
OCO_TOKENS_MAX_INPUT: 4096
OCO_TOKENS_MAX_OUTPUT: 500
OCO_OPENAI_BASE_PATH: ''
OCO_DESCRIPTION: false
OCO_EMOJI: false
Expand Down
10 changes: 5 additions & 5 deletions src/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ import { intro, outro } from '@clack/prompts';
import {
CONFIG_MODES,
AI_TYPE,
DEFAULT_MODEL_TOKEN_LIMIT,
getConfig
} from './commands/config';
import { GenerateCommitMessageErrorEnum } from './generateCommitMessageFromGitDiff';
Expand All @@ -21,7 +20,8 @@ import { IDENTITY } from './prompts';

const config = getConfig();

const MAX_TOKENS = config?.OCO_OPENAI_MAX_TOKENS;
// Cap on the completion size; passed as `max_tokens` on each OpenAI request.
// NOTE(review): `||` (not `??`) means an explicit 0 falls back to the default
// — presumably intentional, since a 0-token completion is unusable; confirm.
const MAX_TOKENS_OUTPUT = config?.OCO_TOKENS_MAX_OUTPUT || 500;
// Model context budget: request tokens must fit within
// MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT (checked before the request is sent).
const MAX_TOKENS_INPUT = config?.OCO_TOKENS_MAX_INPUT || 4096;
// Optional custom API endpoint (e.g. a proxy or Azure resource).
const BASE_PATH = config?.OCO_OPENAI_BASE_PATH;
const API_KEY = config?.OCO_OPENAI_API_KEY;
// Defaults to the public OpenAI service when no API type is configured.
const API_TYPE = config?.OCO_OPENAI_API_TYPE || AI_TYPE.OPENAI;
Expand Down Expand Up @@ -83,7 +83,7 @@ class OpenAi {
messages,
temperature: 0,
top_p: 0.1,
max_tokens: MAX_TOKENS || 500
max_tokens: MAX_TOKENS_OUTPUT
};
try {
const completionReponse = await this.openAI.createChatCompletion({
Expand Down Expand Up @@ -148,14 +148,14 @@ class OpenAi {
messages,
temperature: 0,
top_p: 0.1,
max_tokens: MAX_TOKENS || 500
max_tokens: MAX_TOKENS_OUTPUT
};
try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content) + 4)
.reduce((a, b) => a + b, 0);

if (REQUEST_TOKENS > DEFAULT_MODEL_TOKEN_LIMIT - MAX_TOKENS) {
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
}

Expand Down
37 changes: 29 additions & 8 deletions src/commands/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ dotenv.config();

export enum CONFIG_KEYS {
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
OCO_OPENAI_MAX_TOKENS = 'OCO_OPENAI_MAX_TOKENS',
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
OCO_OPENAI_API_TYPE = 'OCO_OPENAI_API_TYPE',
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
Expand All @@ -35,8 +36,6 @@ export enum AI_TYPE {
AZURE = 'azure'
}

export const DEFAULT_MODEL_TOKEN_LIMIT = 4096;

export enum CONFIG_MODES {
get = 'get',
set = 'set'
Expand Down Expand Up @@ -85,18 +84,37 @@ export const configValidators = {
return value;
},

[CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS](value: any) {
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) {
  // Validates the maximum prompt (input) token budget for the model.
  // Config values coming from env vars / the CLI arrive as strings,
  // so coerce to a number first.
  if (typeof value === 'string') {
    value = parseInt(value, 10); // explicit radix — never rely on legacy parsing
    validateConfig(
      CONFIG_KEYS.OCO_TOKENS_MAX_INPUT,
      !isNaN(value),
      'Must be a number'
    );
  }
  // The previous check (`value ? typeof value === 'number' : undefined`)
  // silently accepted negative numbers and rejected 0 only because it is
  // falsy. A token budget must be a positive number, so require that
  // explicitly and say so in the error message.
  validateConfig(
    CONFIG_KEYS.OCO_TOKENS_MAX_INPUT,
    typeof value === 'number' && !Number.isNaN(value) && value > 0,
    'Must be a positive number'
  );

  return value;
},

[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT](value: any) {
// If the value is a string, convert it to a number.
if (typeof value === 'string') {
value = parseInt(value);
validateConfig(
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
!isNaN(value),
'Must be a number'
);
}
validateConfig(
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
value ? typeof value === 'number' : undefined,
'Must be a number'
);
Expand Down Expand Up @@ -226,8 +244,11 @@ const configPath = pathJoin(homedir(), '.opencommit');
export const getConfig = (): ConfigType | null => {
const configFromEnv = {
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_OPENAI_MAX_TOKENS: process.env.OCO_OPENAI_MAX_TOKENS
? Number(process.env.OCO_OPENAI_MAX_TOKENS)
OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT
? Number(process.env.OCO_TOKENS_MAX_INPUT)
: undefined,
OCO_TOKENS_MAX_OUTPUT: process.env.OCO_TOKENS_MAX_OUTPUT
? Number(process.env.OCO_TOKENS_MAX_OUTPUT)
: undefined,
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_OPENAI_API_TYPE: process.env.OCO_OPENAI_API_TYPE || 'openai',
Expand Down
17 changes: 13 additions & 4 deletions src/generateCommitMessageFromGitDiff.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,16 @@ import {
} from 'openai';

import { api } from './api';
import { DEFAULT_MODEL_TOKEN_LIMIT, getConfig } from './commands/config';
import { getConfig } from './commands/config';
import { getMainCommitPrompt } from './prompts';
import { mergeDiffs } from './utils/mergeDiffs';
import { tokenCount } from './utils/tokenCount';

const config = getConfig();
// Fallback limits used when the corresponding OCO_TOKENS_* keys are unset.
// These mirror the defaults documented in the README.
const DEFAULT_MAX_TOKENS_INPUT = 4096;
const DEFAULT_MAX_TOKENS_OUTPUT = 500;
// NOTE(review): `||` (not `??`) means an explicit 0 in the config also falls
// back to the default — likely intentional, since 0 is not a usable limit.
const MAX_TOKENS_INPUT = config?.OCO_TOKENS_MAX_INPUT || DEFAULT_MAX_TOKENS_INPUT;
const MAX_TOKENS_OUTPUT = config?.OCO_TOKENS_MAX_OUTPUT || DEFAULT_MAX_TOKENS_OUTPUT;

const generateCommitMessageChatCompletionPrompt = async (
diff: string,
Expand All @@ -30,7 +34,8 @@ const generateCommitMessageChatCompletionPrompt = async (
// Error identifiers thrown while generating a commit message from a diff.
// Callers match on these string values, so they must not change.
export enum GenerateCommitMessageErrorEnum {
  // Request prompt would exceed the model's input budget (see api.ts check).
  tooMuchTokens = 'TOO_MUCH_TOKENS',
  internalError = 'INTERNAL_ERROR',
  // NOTE(review): the next two lines both declare `emptyMessage` — this looks
  // like a pre/post diff-rendering collision in this view, not real code; a
  // TypeScript enum cannot declare the same member twice. The trailing-comma
  // form is the post-change line.
  emptyMessage = 'EMPTY_MESSAGE'
  emptyMessage = 'EMPTY_MESSAGE',
  // NOTE(review): unlike the other members, this value is a full user-facing
  // sentence (interpolating DEFAULT_MAX_TOKENS_OUTPUT) rather than a short
  // error code — inconsistent, but it is the string callers display/compare.
  outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
}

const ADJUSTMENT_FACTOR = 20;
Expand All @@ -47,10 +52,10 @@ export const generateCommitMessageByDiff = async (
).reduce((a, b) => a + b, 0);

const MAX_REQUEST_TOKENS =
DEFAULT_MODEL_TOKEN_LIMIT -
MAX_TOKENS_INPUT -
ADJUSTMENT_FACTOR -
INIT_MESSAGES_PROMPT_LENGTH -
config?.OCO_OPENAI_MAX_TOKENS;
MAX_TOKENS_OUTPUT;

if (tokenCount(diff) >= MAX_REQUEST_TOKENS) {
const commitMessagePromises = await getCommitMsgsPromisesFromFileDiffs(
Expand Down Expand Up @@ -132,6 +137,10 @@ function splitDiff(diff: string, maxChangeLength: number) {
const lines = diff.split('\n');
const splitDiffs = [];
let currentDiff = '';

if (maxChangeLength <= 0) {
throw new Error(GenerateCommitMessageErrorEnum.outputTokensTooHigh);
}

for (let line of lines) {
// If a single line exceeds maxChangeLength, split it into multiple lines
Expand Down

0 comments on commit ff2ac7e

Please sign in to comment.