⬇️ feat: Assistant File Downloads (#2234)

* WIP: basic route for file downloads and a file strategy for generating a ReadableStream to pipe as the response

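Not the actual code from this commit, but a minimal sketch of such a download route, assuming an Express server and a hypothetical `getDownloadStream` helper standing in for the file strategy:

```ts
import { Router } from 'express';
import type { Request, Response } from 'express';
import { createReadStream } from 'fs';
import type { Readable } from 'stream';

// Hypothetical "file strategy" helper: resolves a fileId to a readable stream.
// The real strategy may read from local disk, S3, Firebase, or OpenAI.
async function getDownloadStream(fileId: string): Promise<Readable> {
  return createReadStream(`/uploads/${fileId}`);
}

const router = Router();

router.get('/download/:fileId', async (req: Request, res: Response) => {
  const stream = await getDownloadStream(req.params.fileId);
  res.setHeader('Content-Disposition', `attachment; filename="${req.params.fileId}"`);
  res.setHeader('Content-Type', 'application/octet-stream');
  // Piping the stream into the response avoids buffering the whole file in memory.
  stream.pipe(res);
});

export default router;
```
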
* chore(DALLE3): add typing for OpenAI client

* chore: add `CONSOLE_JSON` notes to dotenv.md

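For context, `CONSOLE_JSON` is an environment flag; roughly, the server would consume it along these lines (a sketch of the assumed behavior, not the actual logger config):

```ts
// Assumed behavior: when CONSOLE_JSON=true is set in .env, console output is
// emitted as structured JSON (easier to ingest into log aggregators).
export const useJsonConsoleLogs =
  (process.env.CONSOLE_JSON ?? '').toLowerCase() === 'true';
```
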
* WIP: first pass at OpenAI Assistants file output handling

* feat: first pass at assistant output file downloads from OpenAI

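A rough sketch of what downloading an assistant's output file from OpenAI can look like with the Node SDK; the helper name and buffering approach are illustrative, not the commit's exact implementation:

```ts
import OpenAI from 'openai';
import { Readable } from 'stream';

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Fetch the content of a file the assistant produced (e.g. via code interpreter)
// and expose it as a Node stream that a download route can pipe to the response.
async function downloadAssistantOutput(fileId: string): Promise<Readable> {
  const response = await openai.files.content(fileId); // fetch-style Response
  const buffer = Buffer.from(await response.arrayBuffer());
  return Readable.from(buffer);
}
```
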
* chore: add yml vs. yaml variation to .gitignore for `librechat.yml`

* refactor(retrieveAndProcessFile): remove redundancies

* fix(syncMessages): explicit sort of apiMessages to fix message order on abort

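The idea behind the explicit sort, shown as a standalone sketch with assumed field names:

```ts
type ApiMessage = {
  messageId: string;
  createdAt: string; // ISO timestamp; field names are assumed for illustration
};

// Messages re-fetched after an aborted generation can arrive out of order;
// sorting by creation time restores a stable conversation order.
function sortApiMessages(apiMessages: ApiMessage[]): ApiMessage[] {
  return [...apiMessages].sort(
    (a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime(),
  );
}
```
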
* chore: add logs for warnings and errors, show toast on frontend

* chore: add logger where console was still being used
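
As a rough illustration of the logging cleanup (the import path is assumed, not taken from this diff):

```ts
import { logger } from '~/config';

function onFileError(err: unknown) {
  // before: console.error('File processing failed', err);
  logger.error('[retrieveAndProcessFile] File processing failed', err);
}
```
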
Danny Avila 2024-03-29 08:23:38 -04:00 committed by GitHub
parent 7945fea0f9
commit a00756c469
27 changed files with 555 additions and 248 deletions


@@ -1,9 +1,10 @@
import { ContentTypes } from 'librechat-data-provider';
import type {
  TSubmission,
  Text,
  TMessage,
  TContentData,
  TSubmission,
  ContentPart,
  TContentData,
  TMessageContentParts,
} from 'librechat-data-provider';
import { useCallback, useMemo } from 'react';
@@ -46,9 +47,9 @@ export default function useContentHandler({ setMessages, getMessages }: TUseCont
    }
    // TODO: handle streaming for non-text
    const part: ContentPart = data[ContentTypes.TEXT]
      ? { value: data[ContentTypes.TEXT] }
      : data[type];
    const textPart: Text | string = data[ContentTypes.TEXT];
    const part: ContentPart =
      textPart && typeof textPart === 'string' ? { value: textPart } : data[type];
    /* spreading the content array to avoid mutation */
    response.content = [...(response.content ?? [])];
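
The reworked handler only re-wraps plain-string text deltas; structured parts pass through as-is. Roughly, with illustrative types:

```ts
// A streamed text delta can arrive as a plain string or as an already
// structured part; only the string case should be wrapped into { value }.
type Text = { value: string };

function toTextPart(textPart: Text | string): Text {
  return typeof textPart === 'string' ? { value: textPart } : textPart;
}
```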