Skip to content

Commit

Permalink
🔧 forceCompletionsEndpointType option
Browse files Browse the repository at this point in the history
  • Loading branch information
sestinj committed Apr 18, 2024
1 parent b50c327 commit dd51fcb
Show file tree
Hide file tree
Showing 8 changed files with 68 additions and 99 deletions.
8 changes: 1 addition & 7 deletions core/autocomplete/completionProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import { IDE, ILLM, Position, TabAutocompleteOptions } from "..";
import { RangeInFileWithContents } from "../commands/util";
import { ConfigHandler } from "../config/handler";
import { streamLines } from "../diff/util";
import OpenAI from "../llm/llms/OpenAI";
import { getBasename } from "../util";
import { logDevData } from "../util/devdata";
import { DEFAULT_AUTOCOMPLETE_OPTS } from "../util/parameters";
Expand Down Expand Up @@ -115,12 +114,7 @@ export async function getTabCompletion(
}

// Model
if (llm instanceof OpenAI) {
llm.useLegacyCompletionsEndpoint = true;
} else if (
llm.providerName === "free-trial" &&
llm.model !== "starcoder-7b"
) {
if (llm.providerName === "free-trial" && llm.model !== "starcoder-7b") {
throw new Error(
"The only free trial model supported for tab-autocomplete is starcoder-7b.",
);
Expand Down
2 changes: 0 additions & 2 deletions core/config/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -291,8 +291,6 @@ declare global {
apiKey?: string;
apiBase?: string;
useLegacyCompletionsEndpoint?: boolean;
// Azure options
engine?: string;
apiVersion?: string;
Expand Down
10 changes: 7 additions & 3 deletions core/index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,9 @@ export interface LLMFullCompletionOptions extends BaseCompletionOptions {

model?: string;
}

export type CompletionsEndpointType = "/completions" | "/chat/completions";

export interface LLMOptions {
model: string;

Expand All @@ -289,7 +292,7 @@ export interface LLMOptions {
apiKey?: string;
apiBase?: string;

useLegacyCompletionsEndpoint?: boolean;
forceCompletionsEndpointType?: CompletionsEndpointType;

// Azure options
engine?: string;
Expand Down Expand Up @@ -550,8 +553,8 @@ export type ModelName =
| "claude-3-haiku-20240307"
| "claude-2.1"
// Cohere
|"command-r"
|"command-r-plus"
| "command-r"
| "command-r-plus"
// Gemini
| "gemini-pro"
| "gemini-1.5-pro-latest"
Expand Down Expand Up @@ -625,6 +628,7 @@ export interface ModelDescription {
systemMessage?: string;
requestOptions?: RequestOptions;
promptTemplates?: { [key: string]: string };
forceCompletionsEndpointType?: CompletionsEndpointType;
}

export type EmbeddingsProviderName =
Expand Down
52 changes: 37 additions & 15 deletions core/llm/llms/OpenAI.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { BaseLLM } from "..";
import {
ChatMessage,
CompletionOptions,
CompletionsEndpointType,
LLMOptions,
ModelProvider,
} from "../..";
Expand Down Expand Up @@ -38,12 +39,12 @@ const CHAT_ONLY_MODELS = [
];

class OpenAI extends BaseLLM {
public useLegacyCompletionsEndpoint = false;
public forceCompletionsEndpointType: CompletionsEndpointType | undefined =
undefined;

constructor(options: LLMOptions) {
super(options);
this.useLegacyCompletionsEndpoint =
options.useLegacyCompletionsEndpoint ?? false;
this.forceCompletionsEndpointType = options.forceCompletionsEndpointType;
}

static providerName: ModelProvider = "openai";
Expand Down Expand Up @@ -136,11 +137,18 @@ class OpenAI extends BaseLLM {
prompt: string,
options: CompletionOptions,
): AsyncGenerator<string> {
for await (const chunk of this._streamChat(
[{ role: "user", content: prompt }],
options,
)) {
yield stripImages(chunk.content);
const completionsEndpointType = this._completionsEndpointType(options);
if (completionsEndpointType === "/completions") {
for await (const update of this._legacystreamComplete(prompt, options)) {
yield update;
}
} else {
for await (const chunk of this._streamChat(
[{ role: "user", content: prompt }],
options,
)) {
yield stripImages(chunk.content);
}
}
}

Expand Down Expand Up @@ -172,17 +180,31 @@ class OpenAI extends BaseLLM {
}
}

private _completionsEndpointType(
  options: CompletionOptions,
): CompletionsEndpointType {
  // An explicit user override always wins over the heuristics below.
  if (this.forceCompletionsEndpointType) {
    return this.forceCompletionsEndpointType;
  }

  // Heuristics, in order:
  // 1. Chat-only models, and providers that lack support for the legacy
  //    /completions endpoint, must go through /chat/completions.
  const model = options.model;
  if (CHAT_ONLY_MODELS.includes(model) || !this.supportsCompletions()) {
    return "/chat/completions";
  }
  // 2. Known non-chat models, or an explicit `"raw": true` request,
  //    route to the legacy /completions endpoint.
  if (NON_CHAT_MODELS.includes(model) || options.raw) {
    return "/completions";
  }
  // 3. Default to the modern chat endpoint.
  return "/chat/completions";
}

protected async *_streamChat(
messages: ChatMessage[],
options: CompletionOptions,
): AsyncGenerator<ChatMessage> {
if (
!CHAT_ONLY_MODELS.includes(options.model) &&
this.supportsCompletions() &&
(NON_CHAT_MODELS.includes(options.model) ||
this.useLegacyCompletionsEndpoint ||
options.raw)
) {
// Decision point for /completions vs. /chat/completions
if (this._completionsEndpointType(options) === "/completions") {
for await (const content of this._legacystreamComplete(
stripImages(messages[messages.length - 1]?.content || ""),
options,
Expand Down
22 changes: 5 additions & 17 deletions docs/static/schemas/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,11 @@
},
"apiVersion": {
"type": "string"
},
"forceCompletionsEndpointType": {
"enum": ["/completions", "/chat/completions"],
"title": "Force Completions Endpoint Type",
"markdownDescription": "Use this property only if your OpenAI-compatible API requires you to use either /completions or /chat/completions (see [this issue](https://github.com/continuedev/continue/issues/1132) for an example). In the majority of cases this can be left undefined, and Continue will automatically call the appropriate endpoint."
}
}
}
Expand Down Expand Up @@ -810,23 +815,6 @@
"required": ["engine", "apiVersion", "apiBase"]
}
},
{
"if": {
"properties": {
"provider": {
"enum": ["openai"]
}
},
"required": ["provider"]
},
"then": {
"properties": {
"useLegacyCompletionsEndpoint": {
"type": "boolean"
}
}
}
},
{
"if": {
"properties": {
Expand Down
22 changes: 5 additions & 17 deletions extensions/intellij/src/main/resources/config_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,11 @@
},
"apiVersion": {
"type": "string"
},
"forceCompletionsEndpointType": {
"enum": ["/completions", "/chat/completions"],
"title": "Force Completions Endpoint Type",
"markdownDescription": "Use this property only if your OpenAI-compatible API requires you to use either /completions or /chat/completions (see [this issue](https://github.com/continuedev/continue/issues/1132) for an example). In the majority of cases this can be left undefined, and Continue will automatically call the appropriate endpoint."
}
}
}
Expand Down Expand Up @@ -810,23 +815,6 @@
"required": ["engine", "apiVersion", "apiBase"]
}
},
{
"if": {
"properties": {
"provider": {
"enum": ["openai"]
}
},
"required": ["provider"]
},
"then": {
"properties": {
"useLegacyCompletionsEndpoint": {
"type": "boolean"
}
}
}
},
{
"if": {
"properties": {
Expand Down
22 changes: 5 additions & 17 deletions extensions/vscode/config_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,11 @@
},
"apiVersion": {
"type": "string"
},
"forceCompletionsEndpointType": {
"enum": ["/completions", "/chat/completions"],
"title": "Force Completions Endpoint Type",
"markdownDescription": "Use this property only if your OpenAI-compatible API requires you to use either /completions or /chat/completions (see [this issue](https://github.com/continuedev/continue/issues/1132) for an example). In the majority of cases this can be left undefined, and Continue will automatically call the appropriate endpoint."
}
}
}
Expand Down Expand Up @@ -810,23 +815,6 @@
"required": ["engine", "apiVersion", "apiBase"]
}
},
{
"if": {
"properties": {
"provider": {
"enum": ["openai"]
}
},
"required": ["provider"]
},
"then": {
"properties": {
"useLegacyCompletionsEndpoint": {
"type": "boolean"
}
}
}
},
{
"if": {
"properties": {
Expand Down
29 changes: 8 additions & 21 deletions extensions/vscode/continue_rc_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,14 @@
},
"apiVersion": {
"type": "string"
},
"forceCompletionsEndpointType": {
"enum": [
"/completions",
"/chat/completions"
],
"title": "Force Completions Endpoint Type",
"markdownDescription": "Use this property only if your OpenAI-compatible API requires you to use either /completions or /chat/completions (see [this issue](https://github.com/continuedev/continue/issues/1132) for an example). In the majority of cases this can be left undefined, and Continue will automatically call the appropriate endpoint."
}
}
}
Expand Down Expand Up @@ -905,27 +913,6 @@
]
}
},
{
"if": {
"properties": {
"provider": {
"enum": [
"openai"
]
}
},
"required": [
"provider"
]
},
"then": {
"properties": {
"useLegacyCompletionsEndpoint": {
"type": "boolean"
}
}
}
},
{
"if": {
"properties": {
Expand Down

0 comments on commit dd51fcb

Please sign in to comment.