🍞 fix: Minor fixes and improved Bun support (#1916)

* fix(bun): fix bun compatibility to allow gzip header: https://github.com/oven-sh/bun/issues/267#issuecomment-1854460357
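The linked Bun issue concerns how Bun's `fetch` handles gzip-encoded responses. The patched code isn't reproduced on this page, so the following is only a rough sketch of a runtime-conditional header tweak in that spirit; the helper name and the exact header handling are assumptions, not the commit's actual code.

```js
// Hypothetical sketch, not the code from this commit.
// Bun exposes a global `Bun` object, which is a common way to detect the runtime.
const isBun = typeof Bun !== 'undefined';

function withCompatHeaders(headers = {}) {
  if (!isBun) {
    return headers;
  }
  // Under Bun, explicitly advertise gzip so the compressed response is
  // handled consistently (see oven-sh/bun#267 for the background discussion).
  return { ...headers, 'Accept-Encoding': 'gzip' };
}

// Example usage:
// fetch(url, { headers: withCompatHeaders({ Authorization: `Bearer ${apiKey}` }) });
```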

* chore: update custom config examples

* fix(OpenAIClient.chatCompletion): remove redundant call to `stream.controller.abort()`; `break` already aborts the request, and skipping the extra call prevents spurious abort errors
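As context for why the extra `abort()` was dropped, here is a loose sketch of the streaming pattern involved, with names assumed rather than copied from `OpenAIClient.chatCompletion`: exiting the `for await` loop with `break` already tears down the underlying request, so a follow-up `stream.controller.abort()` only raises abort errors after the fact.

```js
// Illustrative only; assumes an openai-style async-iterable stream.
async function collectStream(stream, onProgress, shouldStop) {
  let text = '';
  for await (const chunk of stream) {
    const token = chunk.choices?.[0]?.delta?.content ?? '';
    text += token;
    onProgress(token);
    if (shouldStop()) {
      // `break` ends iteration and aborts the request; calling
      // stream.controller.abort() here as well would be redundant.
      break;
    }
  }
  return text;
}
```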

* chore: bump bun.lockb

* fix: remove result-thinking class when message is no longer streaming
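A minimal UI-side sketch of the idea (React, with class usage and prop names assumed): compute the `result-thinking` class from the streaming flag so it is dropped as soon as the message stops streaming, instead of lingering afterwards.

```jsx
// Hypothetical component; the real markup and props in the client differ.
function MessageText({ text, isStreaming }) {
  // Apply the cursor/"thinking" treatment only while tokens are still arriving.
  const className = isStreaming ? 'markdown result-thinking' : 'markdown';
  return <div className={className}>{text}</div>;
}
```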

* fix(bun): improve Bun support by forcing use of the old method in the Bun environment; also update the old methods to accept the new customizable params
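A rough sketch of what "forcing the old method in the Bun environment" could look like; the function names below are placeholders, and the `addParams`/`dropParams` handling mirrors the customizable options shown in the config diff further down, not the commit's exact code.

```js
// Hypothetical dispatch; the function names are placeholders, not the commit's identifiers.
const isBunRuntime = typeof Bun !== 'undefined';

// Placeholder request paths standing in for the "old" and "new" client methods.
async function legacyCompletionRequest(body) { /* older, known-good path */ return body; }
async function sdkCompletionRequest(body) { /* newer SDK-based path */ return body; }

async function chatCompletion(payload, { addParams = {}, dropParams = [] } = {}) {
  // Merge user-supplied params and drop unwanted defaults, mirroring the
  // customizable addParams/dropParams options in the config examples below.
  const body = { ...payload, ...addParams };
  for (const key of dropParams) {
    delete body[key];
  }
  // Under Bun, force the older request path; otherwise use the default one.
  return isBunRuntime ? legacyCompletionRequest(body) : sdkCompletionRequest(body);
}
```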

* fix(ci): pass tests
Danny Avila 2024-02-27 17:51:16 -05:00 committed by GitHub
parent 5d887492ea
commit c37d5568bf
9 changed files with 175 additions and 59 deletions

@@ -68,26 +68,26 @@ endpoints:
       titleConvo: true # Set to true to enable title conversation
       # Title Method: Choose between "completion" or "functions".
-      titleMethod: "completion" # Defaults to "completion" if omitted.
+      # titleMethod: "completion" # Defaults to "completion" if omitted.
       # Title Model: Specify the model to use for titles.
       titleModel: "mistral-tiny" # Defaults to "gpt-3.5-turbo" if omitted.
       # Summarize setting: Set to true to enable summarization.
-      summarize: false
+      # summarize: false
       # Summary Model: Specify the model to use if summarization is enabled.
-      summaryModel: "mistral-tiny" # Defaults to "gpt-3.5-turbo" if omitted.
+      # summaryModel: "mistral-tiny" # Defaults to "gpt-3.5-turbo" if omitted.
       # Force Prompt setting: If true, sends a `prompt` parameter instead of `messages`.
-      forcePrompt: false
+      # forcePrompt: false
       # The label displayed for the AI model in messages.
       modelDisplayLabel: "Mistral" # Default is "AI" when not set.
       # Add additional parameters to the request. Default params will be overwritten.
-      addParams:
-        safe_prompt: true # This field is specific to Mistral AI: https://docs.mistral.ai/api/
+      # addParams:
+      #   safe_prompt: true # This field is specific to Mistral AI: https://docs.mistral.ai/api/
       # Drop Default params parameters from the request. See default params in guide linked below.
       # NOTE: For Mistral, it is necessary to drop the following parameters or you will encounter a 422 Error:
@@ -105,9 +105,8 @@ endpoints:
fetch: true
titleConvo: true
titleModel: "gpt-3.5-turbo"
summarize: false
summaryModel: "gpt-3.5-turbo"
forcePrompt: false
# Recommended: Drop the stop parameter from the request as Openrouter models use a variety of stop tokens.
dropParams: ["stop"]
modelDisplayLabel: "OpenRouter"
# See the Custom Configuration Guide for more information: