✨ feat: implement search parameter updates (#7151)

* feat: implement search parameter updates
* Update url params when values change
* reset params on new chat
* move logic to families.ts
* revert unchanged files

---------

Co-authored-by: Danny Avila <danny@librechat.ai>
parent f7777a2723, commit 4af72aac9b
5 changed files with 481 additions and 4 deletions
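In practice, the change means a newly configured chat mirrors its settings into the address bar, and those settings can be read back on reload. A minimal sketch of the intended round trip (the path and values below are hypothetical, not taken from the diff):

```ts
// Hypothetical address-bar state after configuring a new chat:
//   /c/new?endpoint=openAI&model=gpt-4&temperature=0.7
// Reading the settings back is plain URLSearchParams work:
const restored = new URLSearchParams(window.location.search);
console.log(restored.get('endpoint')); // 'openAI'
console.log(restored.get('model')); // 'gpt-4'
console.log(Number(restored.get('temperature'))); // 0.7
```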
@@ -95,7 +95,7 @@ export default function useQueryParams({
   const settingsTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

   const methods = useChatFormContext();
-  const [searchParams] = useSearchParams();
+  const [searchParams, setSearchParams] = useSearchParams();
   const getDefaultConversation = useDefaultConvo();
   const modularChat = useRecoilValue(store.modularChat);
   const availableTools = useRecoilValue(store.availableTools);
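The only change in this hunk is also destructuring the setter from react-router-dom's `useSearchParams`, which returns a `[URLSearchParams, setSearchParams]` tuple. A minimal sketch of that API in an unrelated, hypothetical hook (not part of the diff):

```ts
import { useEffect } from 'react';
import { useSearchParams } from 'react-router-dom';

/** Hypothetical example hook: remove a single query param when it appears. */
export function useClearParam(key: string) {
  const [searchParams, setSearchParams] = useSearchParams();

  useEffect(() => {
    if (!searchParams.has(key)) {
      return;
    }
    const next = new URLSearchParams(searchParams.toString());
    next.delete(key);
    // `replace: true` rewrites the current history entry instead of pushing a new one.
    setSearchParams(next, { replace: true });
  }, [key, searchParams, setSearchParams]);
}
```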
@@ -332,6 +332,12 @@ export default function useQueryParams({

     /** Mark processing as complete and clean up as needed */
     const success = () => {
+      const currentParams = new URLSearchParams(searchParams.toString());
+      currentParams.delete('prompt');
+      currentParams.delete('q');
+      currentParams.delete('submit');
+
+      setSearchParams(currentParams, { replace: true });
       processedRef.current = true;
       console.log('Parameters processed successfully');
       clearInterval(intervalId);
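The success path now strips the one-shot query parameters (`prompt`, `q`, `submit`) and writes the cleaned set back with `{ replace: true }`, so reloading or re-visiting the URL after a submission cannot trigger it again. The same pattern outside React, as a standalone sketch (the function name is illustrative):

```ts
// Standalone sketch of the cleanup pattern: drop one-shot params and
// replace the current history entry so they are not processed twice.
function stripOneShotParams(keys: string[] = ['prompt', 'q', 'submit']): void {
  const current = new URLSearchParams(window.location.search);
  for (const key of keys) {
    current.delete(key);
  }
  const query = current.toString();
  window.history.replaceState({}, '', `${window.location.pathname}${query ? `?${query}` : ''}`);
}
```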
@@ -407,6 +413,7 @@ export default function useQueryParams({
     newQueryConvo,
     newConversation,
     submitMessage,
+    setSearchParams,
     queryClient,
     processSubmission,
   ]);
@@ -14,7 +14,8 @@ import { LocalStorageKeys, Constants } from 'librechat-data-provider';
 import type { TMessage, TPreset, TConversation, TSubmission } from 'librechat-data-provider';
 import type { TOptionSettings, ExtendedFile } from '~/common';
 import { useSetConvoContext } from '~/Providers/SetConvoContext';
-import { storeEndpointSettings, logger } from '~/utils';
+import { storeEndpointSettings, logger, createChatSearchParams } from '~/utils';
+import { createSearchParams } from 'react-router-dom';

 const latestMessageKeysAtom = atom<(string | number)[]>({
   key: 'latestMessageKeys',
@@ -73,9 +74,9 @@ const conversationByIndex = atomFamily<TConversation | null, string | number>({
   default: null,
   effects: [
     ({ onSet, node }) => {
-      onSet(async (newValue) => {
+      onSet(async (newValue, oldValue) => {
         const index = Number(node.key.split('__')[1]);
-        logger.log('conversation', 'Setting conversation:', { index, newValue });
+        logger.log('conversation', 'Setting conversation:', { index, newValue, oldValue });
         if (newValue?.assistant_id != null && newValue.assistant_id) {
           localStorage.setItem(
             `${LocalStorageKeys.ASST_ID_PREFIX}${index}${newValue.endpoint}`,
@@ -104,6 +105,18 @@ const conversationByIndex = atomFamily<TConversation | null, string | number>({
             `${LocalStorageKeys.LAST_CONVO_SETUP}_${index}`,
             JSON.stringify(newValue),
           );
+
+          const shouldUpdateParams =
+            newValue.createdAt === '' &&
+            JSON.stringify(newValue) !== JSON.stringify(oldValue) &&
+            (oldValue as TConversation)?.conversationId === 'new';
+
+          if (shouldUpdateParams) {
+            const newParams = createChatSearchParams(newValue);
+            const searchParams = createSearchParams(newParams);
+            const url = `${window.location.pathname}?${searchParams.toString()}`;
+            window.history.pushState({}, '', url);
+          }
         });
       },
     ] as const,
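For readers unfamiliar with Recoil atom effects: the `onSet` handler receives both the new and the previous value, which the guard above uses to detect the moment a 'new' conversation is first configured before pushing updated query params. A stripped-down sketch of the same mechanism (the atom and its fields are illustrative, not LibreChat's store):

```ts
import { atom } from 'recoil';

// Illustrative atom: mirror changes into the URL from an atom effect.
export const exampleSettingsState = atom<{ model: string } | null>({
  key: 'exampleSettings',
  default: null,
  effects: [
    ({ onSet }) => {
      onSet((newValue, oldValue) => {
        const changed = JSON.stringify(newValue) !== JSON.stringify(oldValue);
        if (newValue && changed) {
          const params = new URLSearchParams({ model: newValue.model });
          window.history.pushState({}, '', `${window.location.pathname}?${params.toString()}`);
        }
      });
    },
  ],
});
```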
client/src/utils/createChatSearchParams.spec.ts (new file, 363 lines added)
@@ -0,0 +1,363 @@
import { EModelEndpoint, Constants } from 'librechat-data-provider';
import type { TConversation, TPreset } from 'librechat-data-provider';
import createChatSearchParams from './createChatSearchParams';

describe('createChatSearchParams', () => {
  describe('conversation inputs', () => {
    it('handles basic conversation properties', () => {
      const conversation: Partial<TConversation> = {
        endpoint: EModelEndpoint.openAI,
        model: 'gpt-4',
        temperature: 0.7,
      };

      const result = createChatSearchParams(conversation as TConversation);
      expect(result.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(result.get('model')).toBe('gpt-4');
      expect(result.get('temperature')).toBe('0.7');
    });

    it('applies only the endpoint property when other conversation fields are absent', () => {
      const endpointOnly = createChatSearchParams({
        endpoint: EModelEndpoint.openAI,
      } as TConversation);
      expect(endpointOnly.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(endpointOnly.has('model')).toBe(false);
      expect(endpointOnly.has('endpoint')).toBe(true);
    });

    it('applies only the model property when other conversation fields are absent', () => {
      const modelOnly = createChatSearchParams({ model: 'gpt-4' } as TConversation);
      expect(modelOnly.has('endpoint')).toBe(false);
      expect(modelOnly.get('model')).toBe('gpt-4');
      expect(modelOnly.has('model')).toBe(true);
    });

    it('includes assistant_id when endpoint is assistants', () => {
      const withAssistantId = createChatSearchParams({
        endpoint: EModelEndpoint.assistants,
        model: 'gpt-4',
        assistant_id: 'asst_123',
        temperature: 0.7,
      } as TConversation);

      expect(withAssistantId.get('assistant_id')).toBe('asst_123');
      expect(withAssistantId.has('endpoint')).toBe(false);
      expect(withAssistantId.has('model')).toBe(false);
      expect(withAssistantId.has('temperature')).toBe(false);
    });

    it('includes agent_id when endpoint is agents', () => {
      const withAgentId = createChatSearchParams({
        endpoint: EModelEndpoint.agents,
        model: 'gpt-4',
        agent_id: 'agent_123',
        temperature: 0.7,
      } as TConversation);

      expect(withAgentId.get('agent_id')).toBe('agent_123');
      expect(withAgentId.has('endpoint')).toBe(false);
      expect(withAgentId.has('model')).toBe(false);
      expect(withAgentId.has('temperature')).toBe(false);
    });

    it('excludes all parameters except assistant_id when endpoint is assistants', () => {
      const withAssistantId = createChatSearchParams({
        endpoint: EModelEndpoint.assistants,
        model: 'gpt-4',
        assistant_id: 'asst_123',
        temperature: 0.7,
      } as TConversation);

      expect(withAssistantId.get('assistant_id')).toBe('asst_123');
      expect(withAssistantId.has('endpoint')).toBe(false);
      expect(withAssistantId.has('model')).toBe(false);
      expect(withAssistantId.has('temperature')).toBe(false);
      expect([...withAssistantId.entries()].length).toBe(1);
    });

    it('excludes all parameters except agent_id when endpoint is agents', () => {
      const withAgentId = createChatSearchParams({
        endpoint: EModelEndpoint.agents,
        model: 'gpt-4',
        agent_id: 'agent_123',
        temperature: 0.7,
      } as TConversation);

      expect(withAgentId.get('agent_id')).toBe('agent_123');
      expect(withAgentId.has('endpoint')).toBe(false);
      expect(withAgentId.has('model')).toBe(false);
      expect(withAgentId.has('temperature')).toBe(false);
      expect([...withAgentId.entries()].length).toBe(1);
    });

    it('returns empty params when agent endpoint has no agent_id', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.agents,
        model: 'gpt-4',
        temperature: 0.7,
      } as TConversation);

      expect(result.toString()).toBe('');
      expect([...result.entries()].length).toBe(0);
    });

    it('returns empty params when assistants endpoint has no assistant_id', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.assistants,
        model: 'gpt-4',
        temperature: 0.7,
      } as TConversation);

      expect(result.toString()).toBe('');
      expect([...result.entries()].length).toBe(0);
    });

    it('ignores agent_id when it matches EPHEMERAL_AGENT_ID', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.agents,
        model: 'gpt-4',
        agent_id: Constants.EPHEMERAL_AGENT_ID,
        temperature: 0.7,
      } as TConversation);

      // The agent_id is ignored but other params are still included
      expect(result.has('agent_id')).toBe(false);
      expect(result.get('endpoint')).toBe(EModelEndpoint.agents);
      expect(result.get('model')).toBe('gpt-4');
      expect(result.get('temperature')).toBe('0.7');
    });

    it('handles stop arrays correctly by joining with commas', () => {
      const withStopArray = createChatSearchParams({
        endpoint: EModelEndpoint.openAI,
        model: 'gpt-4',
        stop: ['stop1', 'stop2'],
      } as TConversation);

      expect(withStopArray.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(withStopArray.get('model')).toBe('gpt-4');
      expect(withStopArray.get('stop')).toBe('stop1,stop2');
    });

    it('filters out non-supported array properties', () => {
      const withOtherArray = createChatSearchParams({
        endpoint: EModelEndpoint.openAI,
        model: 'gpt-4',
        otherArrayProp: ['value1', 'value2'],
      } as any);

      expect(withOtherArray.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(withOtherArray.get('model')).toBe('gpt-4');
      expect(withOtherArray.has('otherArrayProp')).toBe(false);
    });

    it('includes empty arrays in output params', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.openAI,
        stop: [],
      });

      expect(result.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(result.has('stop')).toBe(true);
      expect(result.get('stop')).toBe('');
    });

    it('handles non-stop arrays correctly in paramMap', () => {
      const conversation: any = {
        endpoint: EModelEndpoint.openAI,
        model: 'gpt-4',
        top_p: ['0.7', '0.8'],
      };

      const result = createChatSearchParams(conversation);

      const expectedJson = JSON.stringify(['0.7', '0.8']);
      expect(result.get('top_p')).toBe(expectedJson);
      expect(result.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(result.get('model')).toBe('gpt-4');
    });

    it('includes empty non-stop arrays as serialized empty arrays', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.openAI,
        model: 'gpt-4',
        temperature: 0.7,
        top_p: [],
      } as any);

      expect(result.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(result.get('model')).toBe('gpt-4');
      expect(result.get('temperature')).toBe('0.7');
      expect(result.has('top_p')).toBe(true);
      expect(result.get('top_p')).toBe('[]');
    });

    it('excludes parameters with null or undefined values from the output', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.openAI,
        model: 'gpt-4',
        temperature: 0.7,
        top_p: undefined,
        presence_penalty: undefined,
        frequency_penalty: null,
      } as any);

      expect(result.get('endpoint')).toBe(EModelEndpoint.openAI);
      expect(result.get('model')).toBe('gpt-4');
      expect(result.get('temperature')).toBe('0.7');
      expect(result.has('top_p')).toBe(false);
      expect(result.has('presence_penalty')).toBe(false);
      expect(result.has('frequency_penalty')).toBe(false);
      expect(result).toBeDefined();
    });

    it('handles float parameter values correctly', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.google,
        model: 'gemini-pro',
        frequency_penalty: 0.25,
        temperature: 0.75,
      });

      expect(result.get('endpoint')).toBe(EModelEndpoint.google);
      expect(result.get('model')).toBe('gemini-pro');
      expect(result.get('frequency_penalty')).toBe('0.25');
      expect(result.get('temperature')).toBe('0.75');
    });

    it('handles integer parameter values correctly', () => {
      const result = createChatSearchParams({
        endpoint: EModelEndpoint.google,
        model: 'gemini-pro',
        topK: 40,
        maxOutputTokens: 2048,
      });

      expect(result.get('endpoint')).toBe(EModelEndpoint.google);
      expect(result.get('model')).toBe('gemini-pro');
      expect(result.get('topK')).toBe('40');
      expect(result.get('maxOutputTokens')).toBe('2048');
    });
  });

  describe('preset inputs', () => {
    it('handles preset objects correctly', () => {
      const preset: Partial<TPreset> = {
        endpoint: EModelEndpoint.google,
        model: 'gemini-pro',
        temperature: 0.5,
        topP: 0.8,
      };

      const result = createChatSearchParams(preset as TPreset);
      expect(result.get('endpoint')).toBe(EModelEndpoint.google);
      expect(result.get('model')).toBe('gemini-pro');
      expect(result.get('temperature')).toBe('0.5');
      expect(result.get('topP')).toBe('0.8');
    });

    it('returns only spec param when spec property is present', () => {
      const preset: Partial<TPreset> = {
        endpoint: EModelEndpoint.google,
        model: 'gemini-pro',
        temperature: 0.5,
        spec: 'special_spec',
      };

      const result = createChatSearchParams(preset as TPreset);
      expect(result.get('spec')).toBe('special_spec');
      expect(result.has('endpoint')).toBe(false);
      expect(result.has('model')).toBe(false);
      expect(result.has('temperature')).toBe(false);
      expect([...result.entries()].length).toBe(1);
    });
  });

  describe('record inputs', () => {
    it('includes allowed parameters from Record inputs', () => {
      const record: Record<string, any> = {
        endpoint: EModelEndpoint.anthropic,
        model: 'claude-2',
        temperature: '0.8',
        top_p: '0.95',
        extraParam: 'should-not-be-included',
        invalidParam1: 'value1',
        invalidParam2: 'value2',
      };

      const result = createChatSearchParams(record);
      expect(result.get('endpoint')).toBe(EModelEndpoint.anthropic);
      expect(result.get('model')).toBe('claude-2');
      expect(result.get('temperature')).toBe('0.8');
      expect(result.get('top_p')).toBe('0.95');
    });

    it('excludes disallowed parameters from Record inputs', () => {
      const record: Record<string, any> = {
        endpoint: EModelEndpoint.anthropic,
        model: 'claude-2',
        extraParam: 'should-not-be-included',
        invalidParam1: 'value1',
        invalidParam2: 'value2',
      };

      const result = createChatSearchParams(record);
      expect(result.has('extraParam')).toBe(false);
      expect(result.has('invalidParam1')).toBe(false);
      expect(result.has('invalidParam2')).toBe(false);
      expect(result.toString().includes('invalidParam')).toBe(false);
      expect(result.toString().includes('extraParam')).toBe(false);
    });

    it('includes valid values from Record inputs', () => {
      const record: Record<string, any> = {
        temperature: '0.7',
        top_p: null,
        frequency_penalty: undefined,
      };

      const result = createChatSearchParams(record);
      expect(result.get('temperature')).toBe('0.7');
    });

    it('excludes null or undefined values from Record inputs', () => {
      const record: Record<string, any> = {
        temperature: '0.7',
        top_p: null,
        frequency_penalty: undefined,
      };

      const result = createChatSearchParams(record);
      expect(result.has('top_p')).toBe(false);
      expect(result.has('frequency_penalty')).toBe(false);
    });

    it('handles generic object without endpoint or model properties', () => {
      const customObject = {
        temperature: '0.5',
        top_p: '0.7',
        customProperty: 'value',
      };

      const result = createChatSearchParams(customObject);
      expect(result.get('temperature')).toBe('0.5');
      expect(result.get('top_p')).toBe('0.7');
      expect(result.has('customProperty')).toBe(false);
    });
  });

  describe('edge cases', () => {
    it('returns an empty URLSearchParams instance when input is null', () => {
      const result = createChatSearchParams(null);
      expect(result.toString()).toBe('');
      expect(result instanceof URLSearchParams).toBe(true);
    });

    it('returns an empty URLSearchParams instance for an empty object input', () => {
      const result = createChatSearchParams({});
      expect(result.toString()).toBe('');
      expect(result instanceof URLSearchParams).toBe(true);
    });
  });
});
client/src/utils/createChatSearchParams.ts (new file, 93 lines added)
@@ -0,0 +1,93 @@
import { isAgentsEndpoint, isAssistantsEndpoint, Constants } from 'librechat-data-provider';
import type { TConversation, TPreset } from 'librechat-data-provider';

export default function createChatSearchParams(
  input: TConversation | TPreset | Record<string, string> | null,
): URLSearchParams {
  if (input == null) {
    return new URLSearchParams();
  }

  const params = new URLSearchParams();

  const allowedParams = [
    'endpoint',
    'model',
    'temperature',
    'presence_penalty',
    'frequency_penalty',
    'stop',
    'top_p',
    'max_tokens',
    'topP',
    'topK',
    'maxOutputTokens',
    'promptCache',
    'region',
    'maxTokens',
    'agent_id',
    'assistant_id',
  ];

  if (input && typeof input === 'object' && !('endpoint' in input) && !('model' in input)) {
    Object.entries(input as Record<string, string>).forEach(([key, value]) => {
      if (value != null && allowedParams.includes(key)) {
        params.set(key, value);
      }
    });
    return params;
  }

  const conversation = input as TConversation | TPreset;
  const endpoint = conversation.endpoint;
  if (conversation.spec) {
    return new URLSearchParams({ spec: conversation.spec });
  }
  if (
    isAgentsEndpoint(endpoint) &&
    conversation.agent_id &&
    conversation.agent_id !== Constants.EPHEMERAL_AGENT_ID
  ) {
    return new URLSearchParams({ agent_id: String(conversation.agent_id) });
  } else if (isAssistantsEndpoint(endpoint) && conversation.assistant_id) {
    return new URLSearchParams({ assistant_id: String(conversation.assistant_id) });
  } else if (isAgentsEndpoint(endpoint) && !conversation.agent_id) {
    return params;
  } else if (isAssistantsEndpoint(endpoint) && !conversation.assistant_id) {
    return params;
  }

  if (endpoint) {
    params.set('endpoint', endpoint);
  }
  if (conversation.model) {
    params.set('model', conversation.model);
  }

  const paramMap = {
    temperature: conversation.temperature,
    presence_penalty: conversation.presence_penalty,
    frequency_penalty: conversation.frequency_penalty,
    stop: conversation.stop,
    top_p: conversation.top_p,
    max_tokens: conversation.max_tokens,
    topP: conversation.topP,
    topK: conversation.topK,
    maxOutputTokens: conversation.maxOutputTokens,
    promptCache: conversation.promptCache,
    region: conversation.region,
    maxTokens: conversation.maxTokens,
  };

  return Object.entries(paramMap).reduce((params, [key, value]) => {
    if (value != null) {
      if (Array.isArray(value)) {
        params.set(key, key === 'stop' ? value.join(',') : JSON.stringify(value));
      } else {
        params.set(key, String(value));
      }
    }

    return params;
  }, params);
}
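A quick usage sketch of the new utility (values are hypothetical; the cast mirrors how the spec passes partial objects):

```ts
import { EModelEndpoint } from 'librechat-data-provider';
import type { TConversation } from 'librechat-data-provider';
import { createChatSearchParams } from '~/utils';

// Hypothetical conversation settings -> query string.
const params = createChatSearchParams({
  endpoint: EModelEndpoint.openAI,
  model: 'gpt-4',
  temperature: 0.7,
} as TConversation);

console.log(params.toString());
// "endpoint=openAI&model=gpt-4&temperature=0.7"
```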
@@ -22,6 +22,7 @@ export { default as getLoginError } from './getLoginError';
 export { default as cleanupPreset } from './cleanupPreset';
 export { default as buildDefaultConvo } from './buildDefaultConvo';
 export { default as getDefaultEndpoint } from './getDefaultEndpoint';
+export { default as createChatSearchParams } from './createChatSearchParams';

 export const languages = [
   'java',