Fix: Newly added models to OpenAI-API-Compatible are not displayed in the LLM dropdown menu in a timely manner. #11774 (#11775)
### What problem does this PR solve?

Fix: Newly added models to OpenAI-API-Compatible are not displayed in the LLM dropdown menu in a timely manner. #11774

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
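As the diff below shows, the fix replaces hand-typed React Query cache keys with a shared `LLMApiAction` enum and, once `add_llm` / `deleteFactory` succeed, also invalidates the `LlmList` key so `useFetchLlmList` refetches and the dropdown picks up the new model immediately. Here is a minimal, self-contained sketch of that pattern, not the project's actual hooks: it assumes `@tanstack/react-query`, and the `ILlm` shape plus the in-memory `fetchLlmList` / `addLlmRequest` stubs are hypothetical stand-ins for the real `userService` calls.

```ts
// Minimal sketch of the cache-key pattern used in this PR (not the project's
// actual hooks). Assumes @tanstack/react-query; the fetchers below are stubs.
import {
  useMutation,
  useQuery,
  useQueryClient,
} from '@tanstack/react-query';

// Centralized query keys, mirroring the LLMApiAction enum added in the diff.
export const enum LLMApiAction {
  LlmList = 'llmList',
  AddLlm = 'addLlm',
}

// Hypothetical model shape and in-memory stand-ins for userService calls.
interface ILlm {
  llm_name: string;
}
const store: ILlm[] = [];
const fetchLlmList = async (): Promise<ILlm[]> => [...store];
const addLlmRequest = async (model: ILlm): Promise<number> => {
  store.push(model);
  return 0; // 0 means success, matching the data.code checks in the diff
};

// Every dropdown reads the model list through the shared LlmList key.
export const useLlmList = () =>
  useQuery({
    queryKey: [LLMApiAction.LlmList],
    queryFn: fetchLlmList,
    initialData: [],
  });

export const useAddLlm = () => {
  const queryClient = useQueryClient();
  return useMutation({
    mutationKey: [LLMApiAction.AddLlm],
    mutationFn: async (model: ILlm) => {
      const code = await addLlmRequest(model);
      if (code === 0) {
        // Invalidating the shared key is what keeps the dropdown fresh:
        // every consumer of useLlmList refetches right after a model is added.
        queryClient.invalidateQueries({ queryKey: [LLMApiAction.LlmList] });
      }
      return code;
    },
  });
};
```

Because every hook references the same enum member instead of a hand-typed string, a missed invalidation like the one behind #11774 is harder to reintroduce and easier to spot in review.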
@@ -24,6 +24,15 @@ import { buildLlmUuid } from '@/utils/llm-util';
 
+export const enum LLMApiAction {
+  LlmList = 'llmList',
+  MyLlmList = 'myLlmList',
+  MyLlmListDetailed = 'myLlmListDetailed',
+  FactoryList = 'factoryList',
+  SaveApiKey = 'saveApiKey',
+  SaveTenantInfo = 'saveTenantInfo',
+  AddLlm = 'addLlm',
+  DeleteLlm = 'deleteLlm',
+  EnableLlm = 'enableLlm',
+  DeleteFactory = 'deleteFactory',
+}
+
 export const useFetchLlmList = (modelType?: LlmModelType) => {
@@ -177,7 +186,7 @@ export const useComposeLlmOptionsByModelTypes = (
 
 export const useFetchLlmFactoryList = (): ResponseGetType<IFactory[]> => {
   const { data, isFetching: loading } = useQuery({
-    queryKey: ['factoryList'],
+    queryKey: [LLMApiAction.FactoryList],
     initialData: [],
     gcTime: 0,
     queryFn: async () => {
@@ -196,7 +205,7 @@ export const useFetchMyLlmList = (): ResponseGetType<
   Record<string, IMyLlmValue>
 > => {
   const { data, isFetching: loading } = useQuery({
-    queryKey: ['myLlmList'],
+    queryKey: [LLMApiAction.MyLlmList],
     initialData: {},
     gcTime: 0,
     queryFn: async () => {
@@ -213,7 +222,7 @@ export const useFetchMyLlmListDetailed = (): ResponseGetType<
   Record<string, any>
 > => {
   const { data, isFetching: loading } = useQuery({
-    queryKey: ['myLlmListDetailed'],
+    queryKey: [LLMApiAction.MyLlmListDetailed],
     initialData: {},
     gcTime: 0,
     queryFn: async () => {
@@ -271,14 +280,16 @@ export const useSaveApiKey = () => {
     isPending: loading,
     mutateAsync,
   } = useMutation({
-    mutationKey: ['saveApiKey'],
+    mutationKey: [LLMApiAction.SaveApiKey],
     mutationFn: async (params: IApiKeySavingParams) => {
       const { data } = await userService.set_api_key(params);
       if (data.code === 0) {
         message.success(t('message.modified'));
-        queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
-        queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
-        queryClient.invalidateQueries({ queryKey: ['factoryList'] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.MyLlmList] });
+        queryClient.invalidateQueries({
+          queryKey: [LLMApiAction.MyLlmListDetailed],
+        });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.FactoryList] });
       }
       return data.code;
     },
@@ -303,7 +314,7 @@ export const useSaveTenantInfo = () => {
     isPending: loading,
     mutateAsync,
   } = useMutation({
-    mutationKey: ['saveTenantInfo'],
+    mutationKey: [LLMApiAction.SaveTenantInfo],
     mutationFn: async (params: ISystemModelSettingSavingParams) => {
       const { data } = await userService.set_tenant_info(params);
       if (data.code === 0) {
@@ -324,13 +335,16 @@ export const useAddLlm = () => {
     isPending: loading,
     mutateAsync,
   } = useMutation({
-    mutationKey: ['addLlm'],
+    mutationKey: [LLMApiAction.AddLlm],
     mutationFn: async (params: IAddLlmRequestBody) => {
       const { data } = await userService.add_llm(params);
       if (data.code === 0) {
-        queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
-        queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
-        queryClient.invalidateQueries({ queryKey: ['factoryList'] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.MyLlmList] });
+        queryClient.invalidateQueries({
+          queryKey: [LLMApiAction.MyLlmListDetailed],
+        });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.FactoryList] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.LlmList] });
         message.success(t('message.modified'));
       }
       return data.code;
@@ -348,13 +362,15 @@ export const useDeleteLlm = () => {
     isPending: loading,
     mutateAsync,
   } = useMutation({
-    mutationKey: ['deleteLlm'],
+    mutationKey: [LLMApiAction.DeleteLlm],
     mutationFn: async (params: IDeleteLlmRequestBody) => {
      const { data } = await userService.delete_llm(params);
      if (data.code === 0) {
-        queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
-        queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
-        queryClient.invalidateQueries({ queryKey: ['factoryList'] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.MyLlmList] });
+        queryClient.invalidateQueries({
+          queryKey: [LLMApiAction.MyLlmListDetailed],
+        });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.FactoryList] });
         message.success(t('message.deleted'));
       }
       return data.code;
@@ -372,7 +388,7 @@ export const useEnableLlm = () => {
     isPending: loading,
     mutateAsync,
   } = useMutation({
-    mutationKey: ['enableLlm'],
+    mutationKey: [LLMApiAction.EnableLlm],
     mutationFn: async (params: IDeleteLlmRequestBody & { enable: boolean }) => {
       const reqParam: IDeleteLlmRequestBody & {
         enable?: boolean;
@@ -381,9 +397,11 @@ export const useEnableLlm = () => {
       delete reqParam.enable;
       const { data } = await userService.enable_llm(reqParam);
       if (data.code === 0) {
-        queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
-        queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
-        queryClient.invalidateQueries({ queryKey: ['factoryList'] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.MyLlmList] });
+        queryClient.invalidateQueries({
+          queryKey: [LLMApiAction.MyLlmListDetailed],
+        });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.FactoryList] });
         message.success(t('message.modified'));
       }
       return data.code;
@@ -401,14 +419,16 @@ export const useDeleteFactory = () => {
     isPending: loading,
     mutateAsync,
   } = useMutation({
-    mutationKey: ['deleteFactory'],
+    mutationKey: [LLMApiAction.DeleteFactory],
     mutationFn: async (params: IDeleteLlmRequestBody) => {
       const { data } = await userService.deleteFactory(params);
       if (data.code === 0) {
-        queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
-        queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
-        queryClient.invalidateQueries({ queryKey: ['factoryList'] });
-        queryClient.invalidateQueries({ queryKey: ['llmList'] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.MyLlmList] });
+        queryClient.invalidateQueries({
+          queryKey: [LLMApiAction.MyLlmListDetailed],
+        });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.FactoryList] });
+        queryClient.invalidateQueries({ queryKey: [LLMApiAction.LlmList] });
         message.success(t('message.deleted'));
       }
       return data.code;