From 509018a81e48e52c696d4e6e00fda7fd20dced21 Mon Sep 17 00:00:00 2001
From: holonic
Date: Sat, 23 Mar 2024 15:40:08 +0000
Subject: [PATCH] output_text token length halved for 2048 context models

---
 image/scene.prowl | 2 +-
 output.prowl      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/image/scene.prowl b/image/scene.prowl
index 3e663f4..c50506c 100644
--- a/image/scene.prowl
+++ b/image/scene.prowl
@@ -31,4 +31,4 @@ Choose the artistic medium that this image is represented in, for example "penci
 
 ## Prompt Composition
 Write a comma delimited set of key phrases on a single line which describes all of the above. be sure to only use commas to separate phrases:
-{scene_prompt(520, 0.0)}
+{scene_prompt(520, 0.1)}
diff --git a/output.prowl b/output.prowl
index d9c3d39..715466b 100644
--- a/output.prowl
+++ b/output.prowl
@@ -1,2 +1,2 @@
 # Fulfill the User Request
-{output_text(2048, 0.2)}
+{output_text(1024, 0.2)}
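
For context (a sketch, not part of the patch): the subject line implies that the first argument of {output_text(...)} reserves that many tokens of the model's context window for the completion, so on a 2048-token model a 2048-token completion budget leaves no room for the prompt itself. The snippet below illustrates that budget arithmetic only; the names and the assumption about the argument's meaning are illustrative, not taken from the prowl codebase.

```python
# Hypothetical illustration of the token-budget reasoning behind this change.
CONTEXT_WINDOW = 2048   # total tokens the model can attend to (prompt + completion)
PROMPT_BUDGET = 1024    # rough allowance for the rendered prompt text

def max_completion_tokens(context_window: int, prompt_budget: int) -> int:
    """Largest completion length that still fits alongside the prompt."""
    return max(context_window - prompt_budget, 0)

print(max_completion_tokens(CONTEXT_WINDOW, PROMPT_BUDGET))  # -> 1024, matching the new value
```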