🔧 fix: Mistral type strictness for usage & update token values/windows (#6562)

* 🔧 fix: Resolve Mistral type strictness for the OpenAI usage field (see the sketch after this commit message)

* chore: Enable usage tracking for the Mistral endpoint in the OpenAI configuration

* chore: Add new token values and context windows for the latest premier Mistral models
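The first bullet can be illustrated with a small sketch. The helper name `normalizeUsage` and the exact handling shown are assumptions for illustration, not LibreChat's actual implementation: the idea is to coerce an OpenAI-style usage object (which a provider such as Mistral may return with missing or non-numeric fields) into strict numbers before it is recorded for usage tracking.

```js
// Hypothetical sketch: coerce an OpenAI-style `usage` payload into strict numbers.
// Field names follow the OpenAI chat completions response shape; the helper name
// and its use here are assumptions, not LibreChat's actual code.
function normalizeUsage(usage) {
  if (!usage || typeof usage !== 'object') {
    return { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
  }

  const toNumber = (value) => {
    const n = Number(value);
    return Number.isFinite(n) ? n : 0;
  };

  const prompt_tokens = toNumber(usage.prompt_tokens);
  const completion_tokens = toNumber(usage.completion_tokens);

  return {
    prompt_tokens,
    completion_tokens,
    // Some providers omit `total_tokens`; recompute it when absent.
    total_tokens:
      usage.total_tokens != null
        ? toNumber(usage.total_tokens)
        : prompt_tokens + completion_tokens,
  };
}

// Example: a response with a string count and a missing total still yields strict numbers.
console.log(normalizeUsage({ prompt_tokens: '42', completion_tokens: 17 }));
// => { prompt_tokens: 42, completion_tokens: 17, total_tokens: 59 }
```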
Danny Avila, 2025-03-27 01:57:25 -04:00, committed by GitHub
parent 3ba7c4eb19
commit 7ca5650840
3 changed files with 18 additions and 0 deletions


@@ -34,8 +34,14 @@ const mistralModels = {
'mistral-7b': 31990, // -10 from max
'mistral-small': 31990, // -10 from max
'mixtral-8x7b': 31990, // -10 from max
'mistral-large': 131000,
'mistral-large-2402': 127500,
'mistral-large-2407': 127500,
'pixtral-large': 131000,
'mistral-saba': 32000,
codestral: 256000,
'ministral-8b': 131000,
'ministral-3b': 131000,
};
const cohereModels = {
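The values in the hunk above map model name keys to usable context windows. Below is a minimal sketch of how such a table might be consulted; the `getContextWindow` helper and the longest-prefix-match strategy are illustrative assumptions, not necessarily how LibreChat resolves model limits.

```js
// Hypothetical lookup against the token table from the diff above.
// Function name and prefix-matching strategy are illustrative assumptions.
const mistralModels = {
  'mistral-large-2402': 127500,
  'mistral-large-2407': 127500,
  'mistral-large': 131000,
  'pixtral-large': 131000,
  'mistral-saba': 32000,
  codestral: 256000,
  'ministral-8b': 131000,
  'ministral-3b': 131000,
};

function getContextWindow(modelName, table = mistralModels) {
  // Exact match first, then fall back to the longest key that prefixes the name,
  // so 'mistral-large-2407' wins over 'mistral-large' for a dated model id.
  if (table[modelName] != null) {
    return table[modelName];
  }
  const match = Object.keys(table)
    .filter((key) => modelName.startsWith(key))
    .sort((a, b) => b.length - a.length)[0];
  return match ? table[match] : undefined;
}

console.log(getContextWindow('mistral-large-2407')); // 127500
console.log(getContextWindow('codestral-2501'));     // 256000
console.log(getContextWindow('unknown-model'));      // undefined
```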