diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 79d485562bb..bbd14d61337 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -50,6 +50,7 @@ export class ChatGPTApi implements LLMApi {
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
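+      // nucleus sampling cutoff from the model config, forwarded to the API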
+ top_p: modelConfig.top_p,
};
console.log("[Request] openai payload: ", requestPayload);
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index ff0bc5b347d..a0b0a297a0b 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -888,7 +888,8 @@ export function Chat() {
const showActions =
!isUser &&
i > 0 &&
- !(message.preview || message.content.length === 0);
+ !(message.preview || message.content.length === 0) &&
+ i >= context.length; // do not show actions for context prompts
const showTyping = message.preview || message.streaming;
const shouldShowClearContextDivider = i === clearContextIndex - 1;
diff --git a/app/components/model-config.tsx b/app/components/model-config.tsx
index f79e0e8f6f1..9fd4677e72b 100644
--- a/app/components/model-config.tsx
+++ b/app/components/model-config.tsx
@@ -48,6 +48,25 @@ export function ModelConfigList(props: {
}}
        ></InputRange>
      </ListItem>
+      <ListItem
+        title={Locale.Settings.TopP.Title}
+        subTitle={Locale.Settings.TopP.SubTitle}
+      >
+        <InputRange
+          value={(props.modelConfig.top_p ?? 1).toFixed(1)}
+          min="0"
+          max="1"
+          step="0.1"
+          onChange={(e) => {
+            props.updateConfig(
+              (config) =>
+                (config.top_p = ModalConfigValidator.top_p(
+                  e.currentTarget.valueAsNumber,
+                )),
+            );
+          }}
+        ></InputRange>
+      </ListItem>
diff --git a/app/locales/en.ts b/app/locales/en.ts
--- a/app/locales/en.ts
+++ b/app/locales/en.ts
    Toast: (x: any) => `With ${x} contextual prompts`,
- Edit: "Contextual and Memory Prompts",
+ Edit: "Current Chat Settings",
Add: "Add a Prompt",
Clear: "Context Cleared",
Revert: "Revert",
diff --git a/app/store/config.ts b/app/store/config.ts
index 945e1be7c45..68e299150ef 100644
--- a/app/store/config.ts
+++ b/app/store/config.ts
@@ -33,6 +33,7 @@ export const DEFAULT_CONFIG = {
modelConfig: {
model: "gpt-3.5-turbo" as ModelType,
temperature: 0.5,
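+    // nucleus sampling mass; 1 keeps the full token distribution (OpenAI's default)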
+ top_p: 1,
max_tokens: 2000,
presence_penalty: 0,
frequency_penalty: 0,
@@ -158,6 +159,9 @@ export const ModalConfigValidator = {
temperature(x: number) {
return limitNumber(x, 0, 1, 1);
},
+ top_p(x: number) {
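+      // clamp to [0, 1]; the final argument is the fallback default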
+ return limitNumber(x, 0, 1, 1);
+ },
};
export const useAppConfig = create<ChatConfigStore>()(
@@ -177,15 +181,16 @@ export const useAppConfig = create()(
}),
{
name: StoreKey.Config,
- version: 3.2,
+ version: 3.3,
migrate(persistedState, version) {
- if (version === 3.2) return persistedState as any;
+ if (version === 3.3) return persistedState as any;
const state = persistedState as ChatConfig;
state.modelConfig.sendMemory = true;
state.modelConfig.historyMessageCount = 4;
state.modelConfig.compressMessageLengthThreshold = 1000;
state.modelConfig.frequency_penalty = 0;
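+      // configs persisted before 3.3 have no top_p; default to 1 so sampling is unchanged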
+ state.modelConfig.top_p = 1;
state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
state.dontShowMaskSplashScreen = false;
diff --git a/app/store/prompt.ts b/app/store/prompt.ts
index 98d4193bec9..4e370161948 100644
--- a/app/store/prompt.ts
+++ b/app/store/prompt.ts
export const usePromptStore = create<PromptStore>()(
search(text) {
if (text.length === 0) {
      // return all prompts
- return SearchService.allPrompts.concat([...get().getUserPrompts()]);
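+      // list user prompts ahead of the builtin ones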
+ return get().getUserPrompts().concat(SearchService.builtinPrompts);
}
return SearchService.search(text) as Prompt[];
},