diff --git a/assets/index-93cbaed8.js b/assets/index-bbbdba52.js
similarity index 98%
rename from assets/index-93cbaed8.js
rename to assets/index-bbbdba52.js
index 3fa04cc6..b08d3678 100644
--- a/assets/index-93cbaed8.js
+++ b/assets/index-bbbdba52.js
@@ -3267,7 +3267,11 @@ boo`,[1,4853,13,4136,13,833,29877]),r("镇",[1,29871,30411]),r("🦙",[1,29871,2
 `+n.content+`\r
 \r
 \r
-`),countPromptTokens:(n,e,t)=>n.reduce((a,r)=>(a+=Ur(r,e,t),a),0)+3},ld={...sa,prompt:15e-7,completion:2e-6,max:4096},Fi={...sa,prompt:1e-6,completion:15e-7,max:16384},_o={...sa,prompt:3e-5,completion:6e-5,max:8192},fC={...sa,prompt:5e-6,completion:15e-6,max:131072},dd={...sa,prompt:15e-8,completion:6e-7,max:131072},FC={...sa,prompt:15e-6,completion:6e-5,max:131072},yC={...sa,prompt:3e-6,completion:12e-6,max:131072},xo={...sa,prompt:6e-5,completion:12e-5,max:32768},yi={...sa,prompt:1e-5,completion:3e-5,max:131072},cd={...sa,prompt:0,completion:0,max:1024e3},EC={"gpt-3.5-turbo":{...Fi},"gpt-3.5-turbo-0301":{...ld},"gpt-3.5-turbo-0613":{...ld},"gpt-3.5-turbo-1106":{...Fi},"gpt-3.5-turbo-16k":{...Fi},"gpt-3.5-turbo-16k-0613":{...Fi},"gpt-4":{..._o},"gpt-4o":{...fC},"gpt-4o-mini":{...dd},"gpt-4o-mini-2024-07-18":{...dd},"gpt-4-turbo-preview":{...yi},"gpt-4-turbo-2024-04-09":{...yi},"gpt-4-0314":{..._o},"gpt-4-0613":{..._o},"gpt-4-1106-preview":{...yi},"gpt-4-0125-preview":{...yi},"gpt-4-32k":{...xo},"gpt-4-32k-0314":{...xo},"gpt-4-32k-0613":{...xo},"o1-preview":{...FC},"o1-mini":{...yC}},RC=async()=>{const n=await Xu();return Object.keys(n).forEach(e=>{n[e]={...sa,...n[e]}}),n},Wa={type:"image",prompt:0,max:1e3,request:QC,check:Yu,getTokens:n=>[0],getEndpoint:n=>mo()+MA(),hideSetting:(n,e)=>!1},vC={"dall-e-1024x1024":{...Wa,completion:.02,opt:{size:"1024x1024"}},"dall-e-512x512":{...Wa,completion:.018,opt:{size:"512x512"}},"dall-e-256x256":{...Wa,type:"image",completion:.016,opt:{size:"256x256"}},"dall-e-3-1024x1024":{...Wa,type:"image",completion:.04,opt:{model:"dall-e-3",size:"1024x1024"}},"dall-e-3-1024x1792-Portrait":{...Wa,type:"image",completion:.08,opt:{model:"dall-e-3",size:"1024x1792"}},"dall-e-3-1792x1024-Landscape":{...Wa,type:"image",completion:.08,opt:{model:"dall-e-3",size:"1792x1024"}},"dall-e-3-1024x1024-HD":{...Wa,type:"image",completion:.08,opt:{model:"dall-e-3",size:"1024x1024",quality:"hd"}},"dall-e-3-1024x1792-Portrait-HD":{...Wa,type:"image",completion:.12,opt:{model:"dall-e-3",size:"1024x1792",quality:"hd"}},"dall-e-3-1792x1024-Landscape-HD":{...Wa,type:"image",completion:.12,opt:{model:"dall-e-3",size:"1792x1024",quality:"hd"}}},Vu={...EC,..._A},Mu={...vC},Ii={...Vu,...Mu};Object.entries(Ii).forEach(([n,e])=>{e.id=n,e.modelQuery=e.modelQuery||n});const ud={},Ut=n=>{if(typeof n!="string")return console.warn("Invalid type for model:",n),{...cd,id:n,modelQuery:n};let e=Ii[n]||ud[n];if(e)return e;const a=Object.keys(Ii).sort((r,i)=>i.length-r.length).find(r=>n.startsWith(r));return a?e=Ii[a]:(console.warn("Unable to find model detail for:",n),e={...cd,id:n,modelQuery:n}),ud[n]=e,e},Oi=n=>Ut(n).getEndpoint(n),Du=n=>la(n.settings,n.settings.startSequence||Sn(n.id,Mn("startSequence").placeholder)),SC=n=>n.settings.stopSequence||Sn(n.id,Mn("stopSequence").placeholder),Uu=n=>n.settings.delimiter||Sn(n.id,Mn("delimiter").placeholder),Pi=n=>la(n.settings,n.settings.leadPrompt||Sn(n.id,Mn("leadPrompt").placeholder)),GC=n=>la(n.settings,n.settings.userMessageStart||Sn(n.id,Mn("userMessageStart").placeholder)),kC=n=>la(n.settings,n.settings.userMessageEnd||Sn(n.id,Mn("userMessageEnd").placeholder)),HC=n=>la(n.settings,n.settings.assistantMessageStart||Sn(n.id,Mn("assistantMessageStart").placeholder)),NC=n=>la(n.settings,n.settings.assistantMessageEnd||Sn(n.id,Mn("assistantMessageEnd").placeholder)),TC=n=>la(n.settings,n.settings.systemMessageStart||Sn(n.id,Mn("systemMessageStart").placeholder)),ZC=n=>la(n.settings,n.settings.systemMessageEnd||Sn(n.id,Mn("systemMessageEnd").placeholder)),_u=(n,e,t)=>n==="assistant"?HC(t)+" ":n==="user"?GC(t)+" ":TC(t)+" ",xu=(n,e,t)=>n==="assistant"?NC(t):n==="user"?kC(t):ZC(t),Ju=(n,e)=>Ut(n).getTokens(e),Za=(n,e)=>Ju(n,e).length,Vn=()=>{const n=dt(dn)||{};return!!dt(er)||!!n.enablePetals},pd=(n,e)=>{const t=n.text.toLowerCase(),a=e.text.toLowerCase();return t<a?-1:t>a?1:0};async function Ou(){const e=mo().includes("openai.com")?{}:await RC(),t=Object.keys({...Vu,...e}),a=[],r=[];for(let i=0,o=t.length;idt(er)?(await Ou())[0].text:"stabilityai/StableBeluga2",iB=()=>Zs,oB=()=>Zs.filter(n=>n.key in rp),ap=n=>!!op[n],Mn=n=>{const e=op[n];return e||console.error(`Chat Setting "${n}" not defined in Settings array.`),e},sB=n=>mB[n],yr=()=>lB,Er=()=>dB,bn=(n,e)=>Ut(Tt(n).model).hideSetting(n,e),rp={model:"",messages:[],temperature:1,top_p:1,n:1,stream:!0,stop:null,max_completion_tokens:512,presence_penalty:0,frequency_penalty:0,logit_bias:null,user:void 0},lB={...rp,profile:"",characterName:"ChatGPT",profileName:"",profileDescription:"",continuousChat:"fifo",summaryThreshold:3e3,summarySize:1e3,summaryExtend:0,summaryTemperature:.1,pinTop:0,pinBottom:6,summaryPrompt:"",useSystemPrompt:!1,systemPrompt:"",hideSystemPrompt:!1,sendSystemPromptLast:!1,autoStartSession:!1,trainingPrompts:[],hiddenPromptPrefix:"",hppContinuePrompt:"",hppWithSummaryPrompt:!1,imageGenerationModel:"",startSequence:"",stopSequence:"",aggressiveStop:!0,delimiter:"",userMessageStart:"",userMessageEnd:"",assistantMessageStart:"",assistantMessageEnd:"",systemMessageStart:"",systemMessageEnd:"",leadPrompt:"",repetitionPenalty:1.1,holdSocket:!0,isDirty:!1},ip={profiles:{},lastProfile:"default",defaultProfile:"default",hideSummarized:!1,chatSort:"created",openAICompletionEndpoint:"",enablePetals:!1,pedalsEndpoint:"",openAiEndpoint:"https://api.openai.com"},dB={messages:!0,user:!0,isDirty:!0},Kr={name:{text:"Name",icon:zC,value:"",sortFn:(n,e)=>n.name<e.name?-1:n.name>e.name?1:0},created:{text:"Created",icon:Jo,value:"",sortFn:(n,e)=>(e.created||0)-(n.created||0)||e.id-n.id},lastUse:{text:"Last Use",icon:Jo,value:"",sortFn:(n,e)=>(e.lastUse||0)-(n.lastUse||0)||e.id-n.id},lastAccess:{text:"Last View",icon:Jo,value:"",sortFn:(n,e)=>(e.lastAccess||0)-(n.lastAccess||0)||e.id-n.id}};Object.entries(Kr).forEach(([n,e])=>{e.value=n});const cB={key:"profile",name:"Profile",title:"Choose how you want your assistant to act.",header:"Profile / Presets",headerClass:"is-info",options:[],type:"select",afterChange:(n,e)=>(ji(n),!0),fieldControls:[{getAction:(n,e,t)=>t===Ys().defaultProfile?{title:"This profile is currently your default",icon:aB}:{title:"Set this profile as your default",icon:tB,class:"is-info",action:(a,r,i)=>{ma("defaultProfile",i)}}}]},uB=[{key:"profileName",name:"Profile Name",title:"How this profile is displayed in the select list.",type:"text"},{key:"profileDescription",name:"Description",title:"How this profile is displayed in the select list.",type:"textarea"},{key:"useSystemPrompt",name:"Use Character / System Prompt",title:'Send a "System" prompt as the first prompt.',header:"System Prompt",headerClass:"is-info",type:"boolean"},{key:"characterName",name:"Character Name",title:"What the personality of this profile will be called.",type:"text",hide:n=>!Tt(n).useSystemPrompt},{key:"systemPrompt",name:"System Prompt",title:"First prompt to send.",placeholder:"Enter the first prompt to send here. You can tell ChatGPT how to act.",type:"textarea",hide:n=>!Tt(n).useSystemPrompt},{key:"sendSystemPromptLast",name:"Send System Prompt Last (Can help in gpt 3.5 in some edge cases)",title:"ChatGPT 3.5 can often forget the System Prompt. Sending the system prompt at the end instead of the start of the messages can help.",type:"boolean"},{key:"hiddenPromptPrefix",name:"Hidden Prompts Prefix",title:"Prompts that will be silently injected before every new user prompt, then removed from history.",placeholder:"Enter user prompt prefix here. You can remind ChatGPT how to act. Use ::EOM:: to separate messages.",type:"textarea",hide:n=>!Tt(n).useSystemPrompt},{key:"hppContinuePrompt",name:"Continue Truncation Prompt",title:"If using Hidden Prompts Prefix, a prompt that can be used to help continue a truncated completion.",placeholder:"Enter something like [Continue your response below:]",type:"textarea",hide:n=>!Tt(n).useSystemPrompt||!(Tt(n).hiddenPromptPrefix||"").trim()},{key:"hppWithSummaryPrompt",name:"Use Hidden Prompt Prefix before Summary Prompt",title:"If using Hidden Prompts Prefix, should it also be included before the summary request",placeholder:"Enter something like [Continue your response below:]",type:"boolean",hide:n=>!Tt(n).useSystemPrompt||!(Tt(n).hiddenPromptPrefix||"").trim()},{key:"trainingPrompts",name:"Training Prompts",title:"Prompts used to train.",type:"other",hide:n=>!0},{key:"hideSystemPrompt",name:"Hide System Prompt",title:"Don't show system prompt when displaying message stream.",type:"boolean",hide:n=>!Tt(n).useSystemPrompt},{key:"autoStartSession",name:"Auto-Start Session",title:"If possible, auto-start the chat session, sending a system prompt to get an initial response.",type:"boolean",hide:n=>!Tt(n).useSystemPrompt}],pB=[{key:"continuousChat",name:"Continuous Chat",header:"Continuous Chat",headerClass:"is-info",title:"When out of token space, summarize or remove past prompts and keep going.",type:"select",options:[{value:"",text:"OFF - Chat errors when token buffer full"},{value:"fifo",text:"FIFO - First message in is first out"},{value:"summary",text:"Summary - Summarize past messages"}],afterChange:(n,e)=>!0},{key:"summaryThreshold",name:"Token Threshold",title:"When prompt history breaks this threshold, past prompts will be summarized or rolled off to create space.",min:0,max:32e3,step:1,type:"number",hide:n=>!Tt(n).continuousChat},{key:"summarySize",name:"Max Summary Size",title:"Maximum number of tokens allowed for summary response.",min:128,max:1024,step:1,type:"number",hide:n=>Tt(n).continuousChat!=="summary"},{key:"summaryExtend",name:"Summary Extend",title:"Number of times a truncated summary can be extended.",type:"select-number",options:[{value:0,text:"0 - Summary must fit in first call."},{value:1,text:"1 - Allow one extra API call to extend."},{value:2,text:"2 - Allow two extra API calls to extend."}],hide:n=>Tt(n).continuousChat!=="summary"},{key:"summaryTemperature",name:"Summary Temperature",title:"What sampling temperature to use, between 0 and 2, when generating summary. Lower values, like 0, will be more deterministic.",min:0,max:2,step:.1,type:"number",hide:n=>Tt(n).continuousChat!=="summary"},{key:"pinTop",name:"Keep First Prompts",title:"When we run out of space and need to remove prompts, the top number of prompts will not be removed after summarization/FIFO.",min:0,max:4,step:1,type:"number",hide:n=>!Tt(n).continuousChat},{key:"pinBottom",name:"Keep Bottom Prompts",title:"When we run out of space and need to remove prompts, do not remove or summarize the the last number prompts you set here.",min:0,max:20,step:1,type:"number",hide:n=>!Tt(n).continuousChat},{key:"summaryPrompt",name:"Summary Generation Prompt",title:"A prompt used to summarize past prompts.",placeholder:"Enter a prompt that will be used to summarize past prompts here.",type:"textarea",hide:n=>Tt(n).continuousChat!=="summary"},{key:"imageGenerationModel",name:"Image Generation Model",header:"Image Generation",headerClass:"is-info",title:"Prompt an image with: show me an image of ...",type:"select",options:[]}],gB={key:"model",name:"Model",title:"The model to use. Some may cost more than others.",header:n=>Ut(Tt(n).model).help,headerClass:"is-warning",options:[],type:"select",forceApi:!0,afterChange:(n,e)=>!0},Zs=[cB,...uB,...pB,gB,{key:"stream",name:"Stream Response",title:"Stream responses as they are generated.",type:"boolean",hide:bn},{key:"holdSocket",name:"Continue WebSocket",title:"Hold WebSocket connection open and try to re-use for each new chat message. Faster, but message delimitation could get mangled.",type:"boolean",hide:bn},{key:"temperature",name:"Sampling Temperature",title:`What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
+`),countPromptTokens:(n,e,t)=>n.reduce((a,r)=>(a+=Ur(r,e,t),a),0)+3},ld={...sa,prompt:15e-7,completion:2e-6,max:4096},Fi={...sa,prompt:1e-6,completion:15e-7,max:16384},_o={...sa,prompt:3e-5,completion:6e-5,max:8192},fC={...sa,prompt:5e-6,completion:15e-6,max:131072},dd={...sa,prompt:15e-8,completion:6e-7,max:131072},FC={...sa,prompt:15e-6,completion:6e-5,max:131072},yC={...sa,prompt:3e-6,completion:12e-6,max:131072},xo={...sa,prompt:6e-5,completion:12e-5,max:32768},yi={...sa,prompt:1e-5,completion:3e-5,max:131072},cd={...sa,prompt:0,completion:0,max:1024e3},EC={"gpt-3.5-turbo":{...Fi},"gpt-3.5-turbo-0301":{...ld},"gpt-3.5-turbo-0613":{...ld},"gpt-3.5-turbo-1106":{...Fi},"gpt-3.5-turbo-16k":{...Fi},"gpt-3.5-turbo-16k-0613":{...Fi},"gpt-4":{..._o},"gpt-4o":{...fC},"gpt-4o-mini":{...dd},"gpt-4o-mini-2024-07-18":{...dd},"gpt-4-turbo-preview":{...yi},"gpt-4-turbo-2024-04-09":{...yi},"gpt-4-0314":{..._o},"gpt-4-0613":{..._o},"gpt-4-1106-preview":{...yi},"gpt-4-0125-preview":{...yi},"gpt-4-32k":{...xo},"gpt-4-32k-0314":{...xo},"gpt-4-32k-0613":{...xo},"o1-preview":{...FC},"o1-mini":{...yC}},RC=async()=>{const n=await Xu();return Object.keys(n).forEach(e=>{n[e]={...sa,...n[e]}}),n},Wa={type:"image",prompt:0,max:1e3,request:QC,check:Yu,getTokens:n=>[0],getEndpoint:n=>mo()+MA(),hideSetting:(n,e)=>!1},vC={"dall-e-1024x1024":{...Wa,completion:.02,opt:{size:"1024x1024"}},"dall-e-512x512":{...Wa,completion:.018,opt:{size:"512x512"}},"dall-e-256x256":{...Wa,type:"image",completion:.016,opt:{size:"256x256"}},"dall-e-3-1024x1024":{...Wa,type:"image",completion:.04,opt:{model:"dall-e-3",size:"1024x1024"}},"dall-e-3-1024x1792-Portrait":{...Wa,type:"image",completion:.08,opt:{model:"dall-e-3",size:"1024x1792"}},"dall-e-3-1792x1024-Landscape":{...Wa,type:"image",completion:.08,opt:{model:"dall-e-3",size:"1792x1024"}},"dall-e-3-1024x1024-HD":{...Wa,type:"image",completion:.08,opt:{model:"dall-e-3",size:"1024x1024",quality:"hd"}},"dall-e-3-1024x1792-Portrait-HD":{...Wa,type:"image",completion:.12,opt:{model:"dall-e-3",size:"1024x1792",quality:"hd"}},"dall-e-3-1792x1024-Landscape-HD":{...Wa,type:"image",completion:.12,opt:{model:"dall-e-3",size:"1792x1024",quality:"hd"}}},Vu={...EC,..._A},Mu={...vC},Ii={...Vu,...Mu};Object.entries(Ii).forEach(([n,e])=>{e.id=n,e.modelQuery=e.modelQuery||n});const ud={},Ut=n=>{if(typeof n!="string")return console.warn("Invalid type for model:",n),{...cd,id:n,modelQuery:n};let e=Ii[n]||ud[n];if(e)return e;const a=Object.keys(Ii).sort((r,i)=>i.length-r.length).find(r=>n.startsWith(r));return a?e=Ii[a]:(console.warn("Unable to find model detail for:",n),e={...cd,id:n,modelQuery:n}),ud[n]=e,e},Oi=n=>Ut(n).getEndpoint(n),Du=n=>la(n.settings,n.settings.startSequence||Sn(n.id,Mn("startSequence").placeholder)),SC=n=>n.settings.stopSequence||Sn(n.id,Mn("stopSequence").placeholder),Uu=n=>n.settings.delimiter||Sn(n.id,Mn("delimiter").placeholder),Pi=n=>la(n.settings,n.settings.leadPrompt||Sn(n.id,Mn("leadPrompt").placeholder)),GC=n=>la(n.settings,n.settings.userMessageStart||Sn(n.id,Mn("userMessageStart").placeholder)),kC=n=>la(n.settings,n.settings.userMessageEnd||Sn(n.id,Mn("userMessageEnd").placeholder)),HC=n=>la(n.settings,n.settings.assistantMessageStart||Sn(n.id,Mn("assistantMessageStart").placeholder)),NC=n=>la(n.settings,n.settings.assistantMessageEnd||Sn(n.id,Mn("assistantMessageEnd").placeholder)),TC=n=>la(n.settings,n.settings.systemMessageStart||Sn(n.id,Mn("systemMessageStart").placeholder)),ZC=n=>la(n.settings,n.settings.systemMessageEnd||Sn(n.id,Mn("systemMessageEnd").placeholder)),_u=(n,e,t)=>n==="assistant"?HC(t)+" ":n==="user"?GC(t)+" ":TC(t)+" ",xu=(n,e,t)=>n==="assistant"?NC(t):n==="user"?kC(t):ZC(t),Ju=(n,e)=>Ut(n).getTokens(e),Za=(n,e)=>Ju(n,e).length,Vn=()=>{const n=dt(dn)||{};return!!dt(er)||!!n.enablePetals},pd=(n,e)=>{const t=n.text.toLowerCase(),a=e.text.toLowerCase();return t<a?-1:t>a?1:0};async function Ou(){const e=mo().includes("openai.com")?{}:await RC(),t=Object.keys({...Vu,...e}),a=[],r=[];for(let i=0,o=t.length;idt(er)?(await Ou())[0].text:"stabilityai/StableBeluga2",iB=()=>Zs,oB=()=>Zs.filter(n=>n.key in rp),ap=n=>!!op[n],Mn=n=>{const e=op[n];return e||console.error(`Chat Setting "${n}" not defined in Settings array.`),e},sB=n=>mB[n],yr=()=>lB,Er=()=>dB,bn=(n,e)=>Ut(Tt(n).model).hideSetting(n,e),rp={model:"",messages:[],temperature:1,top_p:1,n:1,stream:!0,stop:null,max_completion_tokens:512,presence_penalty:0,frequency_penalty:0,logit_bias:null,user:void 0},lB={...rp,profile:"",characterName:"ChatGPT",profileName:"",profileDescription:"",continuousChat:"fifo",summaryThreshold:3e3,summarySize:1e3,summaryExtend:0,summaryTemperature:.1,pinTop:0,pinBottom:6,summaryPrompt:"",useSystemPrompt:!1,systemPrompt:"",hideSystemPrompt:!1,sendSystemPromptLast:!1,autoStartSession:!1,trainingPrompts:[],hiddenPromptPrefix:"",hppContinuePrompt:"",hppWithSummaryPrompt:!1,imageGenerationModel:"",startSequence:"",stopSequence:"",aggressiveStop:!0,delimiter:"",userMessageStart:"",userMessageEnd:"",assistantMessageStart:"",assistantMessageEnd:"",systemMessageStart:"",systemMessageEnd:"",leadPrompt:"",repetitionPenalty:1.1,holdSocket:!0,isDirty:!1},ip={profiles:{},lastProfile:"default",defaultProfile:"default",hideSummarized:!1,chatSort:"created",openAICompletionEndpoint:"",enablePetals:!1,pedalsEndpoint:"",openAiEndpoint:"https://api.openai.com"},dB={messages:!0,user:!0,isDirty:!0},Kr={name:{text:"Name",icon:zC,value:"",sortFn:(n,e)=>n.name<e.name?-1:n.name>e.name?1:0},created:{text:"Created",icon:Jo,value:"",sortFn:(n,e)=>(e.created||0)-(n.created||0)||e.id-n.id},lastUse:{text:"Last Use",icon:Jo,value:"",sortFn:(n,e)=>(e.lastUse||0)-(n.lastUse||0)||e.id-n.id},lastAccess:{text:"Last View",icon:Jo,value:"",sortFn:(n,e)=>(e.lastAccess||0)-(n.lastAccess||0)||e.id-n.id}};Object.entries(Kr).forEach(([n,e])=>{e.value=n});const cB={key:"profile",name:"Profile",title:"Choose how you want your assistant to act.",header:"Profile / Presets",headerClass:"is-info",options:[],type:"select",afterChange:(n,e)=>(ji(n),!0),fieldControls:[{getAction:(n,e,t)=>t===Ys().defaultProfile?{title:"This profile is currently your default",icon:aB}:{title:"Set this profile as your default",icon:tB,class:"is-info",action:(a,r,i)=>{ma("defaultProfile",i)}}}]},uB=[{key:"profileName",name:"Profile Name",title:"How this profile is displayed in the select list.",type:"text"},{key:"profileDescription",name:"Description",title:"How this profile is displayed in the select list.",type:"textarea"},{key:"useSystemPrompt",name:"Use Character / System Prompt",title:'Send a "System" prompt as the first prompt.',header:"System Prompt",headerClass:"is-info",type:"boolean"},{key:"characterName",name:"Character Name",title:"What the personality of this profile will be called.",type:"text",hide:n=>!Tt(n).useSystemPrompt},{key:"systemPrompt",name:"System Prompt",title:"First prompt to send.",placeholder:"Enter the first prompt to send here. You can tell ChatGPT how to act.",type:"textarea",hide:n=>!Tt(n).useSystemPrompt},{key:"sendSystemPromptLast",name:"Send System Prompt Last (Can help in gpt 3.5 in some edge cases)",title:"ChatGPT 3.5 can often forget the System Prompt. Sending the system prompt at the end instead of the start of the messages can help.",type:"boolean"},{key:"hiddenPromptPrefix",name:"Hidden Prompts Prefix",title:"Prompts that will be silently injected before every new user prompt, then removed from history.",placeholder:"Enter user prompt prefix here. You can remind ChatGPT how to act. Use ::EOM:: to separate messages.",type:"textarea",hide:n=>!Tt(n).useSystemPrompt},{key:"hppContinuePrompt",name:"Continue Truncation Prompt",title:"If using Hidden Prompts Prefix, a prompt that can be used to help continue a truncated completion.",placeholder:"Enter something like [Continue your response below:]",type:"textarea",hide:n=>!Tt(n).useSystemPrompt||!(Tt(n).hiddenPromptPrefix||"").trim()},{key:"hppWithSummaryPrompt",name:"Use Hidden Prompt Prefix before Summary Prompt",title:"If using Hidden Prompts Prefix, should it also be included before the summary request",placeholder:"Enter something like [Continue your response below:]",type:"boolean",hide:n=>!Tt(n).useSystemPrompt||!(Tt(n).hiddenPromptPrefix||"").trim()},{key:"trainingPrompts",name:"Training Prompts",title:"Prompts used to train.",type:"other",hide:n=>!0},{key:"hideSystemPrompt",name:"Hide System Prompt",title:"Don't show system prompt when displaying message stream.",type:"boolean",hide:n=>!Tt(n).useSystemPrompt},{key:"autoStartSession",name:"Auto-Start Session",title:"If possible, auto-start the chat session, sending a system prompt to get an initial response.",type:"boolean",hide:n=>!Tt(n).useSystemPrompt}],pB=[{key:"continuousChat",name:"Continuous Chat",header:"Continuous Chat",headerClass:"is-info",title:"When out of token space, summarize or remove past prompts and keep going.",type:"select",options:[{value:"",text:"OFF - Chat errors when token buffer full"},{value:"fifo",text:"FIFO - First message in is first out"},{value:"summary",text:"Summary - Summarize past messages"}],afterChange:(n,e)=>!0},{key:"summaryThreshold",name:"Token Threshold",title:"When prompt history breaks this threshold, past prompts will be summarized or rolled off to create space.",min:0,max:32e3,step:1,type:"number",hide:n=>!Tt(n).continuousChat},{key:"summarySize",name:"Max Summary Size",title:"Maximum number of tokens allowed for summary response.",min:128,max:1024,step:1,type:"number",hide:n=>Tt(n).continuousChat!=="summary"},{key:"summaryExtend",name:"Summary Extend",title:"Number of times a truncated summary can be extended.",type:"select-number",options:[{value:0,text:"0 - Summary must fit in first call."},{value:1,text:"1 - Allow one extra API call to extend."},{value:2,text:"2 - Allow two extra API calls to extend."}],hide:n=>Tt(n).continuousChat!=="summary"},{key:"summaryTemperature",name:"Summary Temperature",title:"What sampling temperature to use, between 0 and 2, when generating summary. Lower values, like 0, will be more deterministic.",min:0,max:2,step:.1,type:"number",hide:n=>Tt(n).continuousChat!=="summary"},{key:"pinTop",name:"Keep First Prompts",title:"When we run out of space and need to remove prompts, the top number of prompts will not be removed after summarization/FIFO.",min:0,max:4,step:1,type:"number",hide:n=>!Tt(n).continuousChat},{key:"pinBottom",name:"Keep Bottom Prompts",title:"When we run out of space and need to remove prompts, do not remove or summarize the last number of prompts you set here.",min:0,max:20,step:1,type:"number",hide:n=>!Tt(n).continuousChat},{key:"summaryPrompt",name:"Summary Generation Prompt",title:"A prompt used to summarize past prompts.",placeholder:"Enter a prompt that will be used to summarize past prompts here.",type:"textarea",hide:n=>Tt(n).continuousChat!=="summary"},{key:"imageGenerationModel",name:"Image Generation Model",header:"Image Generation",headerClass:"is-info",title:"Prompt an image with: show me an image of ...",type:"select",options:[]}],gB={key:"model",name:"Model",title:"The model to use. Some may cost more than others.",header:n=>Ut(Tt(n).model).help,headerClass:"is-warning",options:[],type:"select",forceApi:!0,afterChange:(n,e)=>!0},Zs=[cB,...uB,...pB,gB,{key:"stream",name:"Stream Response",title:"Stream responses as they are generated.",type:"boolean",hide:bn},{key:"holdSocket",name:"Continue WebSocket",title:"Hold WebSocket connection open and try to re-use for each new chat message. Faster, but message delimitation could get mangled.",type:"boolean",hide:bn},{key:"temperature",name:"Sampling Temperature",title:`What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
+We generally recommend altering this or top_p but not both.`,min:0,max:2,step:.1,type:"number"},{key:"top_p",name:"Nucleus Sampling (Top-p)",title:`An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
diff --git a/index.html b/index.html
index 04327c6f..d6cfff6f 100644
--- a/index.html
+++ b/index.html
@@ -8,7 +8,7 @@
     <title>ChatGPT-web</title>
-    <script type="module" crossorigin src="./assets/index-93cbaed8.js"></script>
+    <script type="module" crossorigin src="./assets/index-bbbdba52.js"></script>
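The renamed bundle above is minified, so two things it carries are hard to eyeball in the hunk: the per-model pricing/limit table and the prompt token count. The sketch below restates both in readable JavaScript. It is a reconstruction for review purposes only: the identifier names (MODEL_DETAILS, countTokens, estimateCostUSD) are assumptions rather than names from the bundle, the signature of countPromptTokens is simplified, and only a few representative models are listed. Prices are USD per token and context-window sizes in tokens, copied from the literals in the hunk.

// Readable restatement of part of the minified pricing table above.
const MODEL_DETAILS = {
  'gpt-4o': { prompt: 5e-6, completion: 15e-6, max: 131072 },
  'gpt-4o-mini': { prompt: 15e-8, completion: 6e-7, max: 131072 },
  'gpt-4': { prompt: 3e-5, completion: 6e-5, max: 8192 },
  'o1-preview': { prompt: 15e-6, completion: 6e-5, max: 131072 },
  'o1-mini': { prompt: 3e-6, completion: 12e-6, max: 131072 }
};

// Mirrors countPromptTokens in the bundle (signature simplified here):
// sum each message's tokens, then add a fixed 3, which presumably covers
// the chat-format reply-priming overhead.
const countPromptTokens = (messages, countTokens) =>
  messages.reduce((total, message) => total + countTokens(message), 0) + 3;

// Assumed helper, not in the bundle: turn token counts into a cost estimate.
const estimateCostUSD = (model, promptTokens, completionTokens) => {
  const { prompt, completion } = MODEL_DETAILS[model];
  return promptTokens * prompt + completionTokens * completion;
};

// A 1000-token prompt answered with 500 tokens on gpt-4o:
// 1000 * 5e-6 + 500 * 15e-6 = 0.0125 USD.
console.log(estimateCostUSD('gpt-4o', 1000, 500));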
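The bundle also resolves model ids it does not know exactly by a longest-prefix match with a miss cache (the function minified as Ut above, with cd as the zero-cost fallback record). A de-minified sketch of that logic, again with assumed names:

// De-minified sketch of the bundle's model lookup (Ut).
const FALLBACK_DETAIL = { prompt: 0, completion: 0, max: 1024e3 };
const missCache = {};

function getModelDetail (model, details) {
  if (typeof model !== 'string') {
    console.warn('Invalid type for model:', model);
    return { ...FALLBACK_DETAIL, id: model, modelQuery: model };
  }
  const known = details[model] || missCache[model];
  if (known) return known;
  // Sort known ids longest-first so the most specific prefix wins: a dated
  // variant such as the hypothetical 'gpt-4o-mini-2025-01-01' resolves to
  // 'gpt-4o-mini' rather than stopping at the shorter 'gpt-4o' or 'gpt-4'.
  const match = Object.keys(details)
    .sort((a, b) => b.length - a.length)
    .find(key => model.startsWith(key));
  let detail;
  if (match) {
    detail = details[match];
  } else {
    console.warn('Unable to find model detail for:', model);
    detail = { ...FALLBACK_DETAIL, id: model, modelQuery: model };
  }
  missCache[model] = detail; // cache so the prefix scan runs once per id
  return detail;
}

The longest-first sort matters because Array.prototype.find returns the first hit; without it, the 'gpt-4' entry would shadow every 'gpt-4o' and 'gpt-4-32k' variant.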