diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 15cfb7ca602..b7b37492c82 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -240,7 +240,7 @@ export class ChatGPTApi implements LLMApi {
       }
 
       // add max_tokens to vision model
-      if (visionModel) {
+      if (visionModel && modelConfig.model !== "glm-4v-flash") {
         requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
       }
     }
diff --git a/app/utils.ts b/app/utils.ts
index 30f95fa02eb..169c6e182b2 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -265,6 +265,7 @@ export function isVisionModel(model: string) {
     "learnlm",
     "qwen-vl",
     "qwen2-vl",
+    "glm-4v",
   ];
   const isGpt4Turbo =
     model.includes("gpt-4-turbo") && !model.includes("preview");
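
For readers skimming the patch, here is a minimal TypeScript sketch of how the two hunks interact: adding `"glm-4v"` to the keyword list makes GLM vision models register as vision models, while the extra condition keeps the forced `max_tokens` off the request payload for `glm-4v-flash` specifically. The `buildPayload` helper and the trimmed keyword list below are illustrative stand-ins, not the project's actual code; only the patched condition and the `"glm-4v"` keyword come from the diff.

```ts
// Simplified keyword match; the real isVisionModel also handles exclusions
// and the gpt-4-turbo special case.
const visionKeywords = ["vision", "gpt-4o", "qwen-vl", "qwen2-vl", "glm-4v"];

function isVisionModel(model: string): boolean {
  return visionKeywords.some((keyword) => model.includes(keyword));
}

// Hypothetical helper mirroring the patched condition in openai.ts.
function buildPayload(model: string, maxTokens: number) {
  const payload: Record<string, unknown> = { model };
  if (isVisionModel(model) && model !== "glm-4v-flash") {
    payload["max_tokens"] = Math.max(maxTokens, 4000);
  }
  return payload;
}

console.log(buildPayload("glm-4v", 1024)); // { model: 'glm-4v', max_tokens: 4000 }
console.log(buildPayload("glm-4v-flash", 1024)); // { model: 'glm-4v-flash' } — no max_tokens sent
```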