mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-01-03 17:18:51 +01:00
✋ feat: Stop Sequences for Conversations & Presets (#2536)
* feat: `stop` conversation parameter
* feat: Tag primitive
* feat: dynamic tags
* refactor: update tag styling
* feat: add stop sequences to OpenAI settings
* fix(Presentation): prevent `SidePanel` re-renders that flicker side panel
* refactor: use stop placeholder
* feat: type and schema update for `stop` and `TPreset` in generation param related types
* refactor: pass conversation to dynamic settings
* refactor(OpenAIClient): remove default handling for `modelOptions.stop`
* docs: fix Google AI Setup formatting
* feat: current_model
* docs: WIP update
* fix(ChatRoute): prevent default preset override before `hasSetConversation.current` becomes true by including latest conversation state as template
* docs: update docs with more info on `stop`
* chore: bump config_version
* refactor: CURRENT_MODEL handling
This commit is contained in:
parent
4121818124
commit
099aa9dead
29 changed files with 690 additions and 93 deletions
|
|
@@ -1,6 +1,7 @@
|
|||
const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const {
|
||||
Constants,
|
||||
ImageDetail,
|
||||
EModelEndpoint,
|
||||
resolveHeaders,
|
||||
|
|
@@ -20,9 +21,9 @@ const {
|
|||
const {
|
||||
truncateText,
|
||||
formatMessage,
|
||||
createContextHandlers,
|
||||
CUT_OFF_PROMPT,
|
||||
titleInstruction,
|
||||
createContextHandlers,
|
||||
} = require('./prompts');
|
||||
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
|
||||
const { handleOpenAIErrors } = require('./tools/util');
|
||||
|
|
@@ -200,16 +201,6 @@ class OpenAIClient extends BaseClient {
|
|||
|
||||
this.setupTokens();
|
||||
|
||||
if (!this.modelOptions.stop && !this.isVisionModel) {
|
||||
const stopTokens = [this.startToken];
|
||||
if (this.endToken && this.endToken !== this.startToken) {
|
||||
stopTokens.push(this.endToken);
|
||||
}
|
||||
stopTokens.push(`\n${this.userLabel}:`);
|
||||
stopTokens.push('<|diff_marker|>');
|
||||
this.modelOptions.stop = stopTokens;
|
||||
}
|
||||
|
||||
if (reverseProxy) {
|
||||
this.completionsUrl = reverseProxy;
|
||||
this.langchainProxy = extractBaseURL(reverseProxy);
|
||||
|
|
@@ -729,7 +720,10 @@ class OpenAIClient extends BaseClient {
|
|||
|
||||
const { OPENAI_TITLE_MODEL } = process.env ?? {};
|
||||
|
||||
const model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
|
||||
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
|
||||
if (model === Constants.CURRENT_MODEL) {
|
||||
model = this.modelOptions.model;
|
||||
}
|
||||
|
||||
const modelOptions = {
|
||||
// TODO: remove the gpt fallback and make it specific to endpoint
|
||||
|
|
@@ -851,7 +845,11 @@ ${convo}
|
|||
|
||||
// TODO: remove the gpt fallback and make it specific to endpoint
|
||||
const { OPENAI_SUMMARY_MODEL = 'gpt-3.5-turbo' } = process.env ?? {};
|
||||
const model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
|
||||
let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
|
||||
if (model === Constants.CURRENT_MODEL) {
|
||||
model = this.modelOptions.model;
|
||||
}
|
||||
|
||||
const maxContextTokens =
|
||||
getModelMaxTokens(
|
||||
model,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue