feat: support Xinference (#319)
### What problem does this PR solve?

Support Xorbitsai Inference (Xinference) as a model provider.

Issue link: #299

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
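In short: instead of hard-coding `'Ollama'` at each decision point, the PR introduces a `LocalLlmFactories` list and an `isLocalLlmFactory` helper, then threads the clicked factory name through the hook into the shared modal, the request payload, and the docs link. A minimal standalone sketch of that flow, using names from the diff (`buildAddLlmPayload` is a hypothetical stand-in for the modal's submit handler, not part of the PR):

```ts
// LocalLlmFactories / isLocalLlmFactory come from the diff below;
// buildAddLlmPayload is illustrative only.
const LocalLlmFactories = ['Ollama', 'Xinference'];

const isLocalLlmFactory = (llmFactory: string) =>
  LocalLlmFactories.some((x) => x === llmFactory);

function buildAddLlmPayload(
  llmFactory: string,
  values: { llm_name: string; model_type: string },
) {
  // llm_factory was previously hard-coded to 'Ollama'.
  return { ...values, llm_factory: llmFactory };
}

// A Xinference card click now takes the same local-model path as Ollama:
if (isLocalLlmFactory('Xinference')) {
  console.info(
    buildAddLlmPayload('Xinference', { llm_name: 'my-uid', model_type: 'chat' }),
  );
}
```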
@@ -14,3 +14,5 @@ export const UserSettingIconMap = {
 };
 
 export * from '@/constants/setting';
+
+export const LocalLlmFactories = ['Ollama', 'Xinference'];
@@ -132,6 +132,7 @@ export const useSelectModelProvidersLoading = () => {
 
 export const useSubmitOllama = () => {
   const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
+  const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
   const addLlm = useAddLlm();
   const {
     visible: llmAddingVisible,
@@ -149,11 +150,17 @@ export const useSubmitOllama = () => {
     [hideLlmAddingModal, addLlm],
   );
 
+  const handleShowLlmAddingModal = (llmFactory: string) => {
+    setSelectedLlmFactory(llmFactory);
+    showLlmAddingModal();
+  };
+
   return {
     llmAddingLoading: loading,
     onLlmAddingOk,
     llmAddingVisible,
     hideLlmAddingModal,
-    showLlmAddingModal,
+    showLlmAddingModal: handleShowLlmAddingModal,
+    selectedLlmFactory,
   };
 };
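For consumers, the hook's return value keeps the same keys, but `showLlmAddingModal` is now the wrapping handler (so it takes the factory name) and `selectedLlmFactory` is newly exposed. A sketch of the call-site shape (the destructuring mirrors the diff; the import path and the example call are assumptions):

```ts
// Inside a React component; the './hooks' path is assumed.
import { useSubmitOllama } from './hooks';

const {
  llmAddingVisible,
  hideLlmAddingModal,
  showLlmAddingModal, // now (llmFactory: string) => void
  onLlmAddingOk,
  llmAddingLoading,
  selectedLlmFactory,
} = useSubmitOllama();

// Records 'Xinference' in state, then opens the shared modal:
showLlmAddingModal('Xinference');
```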
@@ -25,6 +25,7 @@ import {
 } from 'antd';
 import { useCallback } from 'react';
 import SettingTitle from '../components/setting-title';
+import { isLocalLlmFactory } from '../utils';
 import ApiKeyModal from './api-key-modal';
 import {
   useSelectModelProvidersLoading,
@@ -43,6 +44,7 @@ const IconMap = {
   'ZHIPU-AI': 'zhipu',
   文心一言: 'wenxin',
   Ollama: 'ollama',
+  Xinference: 'xinference',
 };
 
 const LlmIcon = ({ name }: { name: string }) => {
@@ -89,7 +91,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
         <Col span={12} className={styles.factoryOperationWrapper}>
           <Space size={'middle'}>
             <Button onClick={handleApiKeyClick}>
-              {item.name === 'Ollama' ? t('addTheModel') : 'API-Key'}
+              {isLocalLlmFactory(item.name) ? t('addTheModel') : 'API-Key'}
               <SettingOutlined />
             </Button>
             <Button onClick={handleShowMoreClick}>
@@ -147,12 +149,13 @@ const UserSettingModel = () => {
     showLlmAddingModal,
     onLlmAddingOk,
     llmAddingLoading,
+    selectedLlmFactory,
   } = useSubmitOllama();
 
   const handleApiKeyClick = useCallback(
     (llmFactory: string) => {
-      if (llmFactory === 'Ollama') {
-        showLlmAddingModal();
+      if (isLocalLlmFactory(llmFactory)) {
+        showLlmAddingModal(llmFactory);
       } else {
         showApiKeyModal({ llm_factory: llmFactory });
       }
@@ -161,8 +164,8 @@ const UserSettingModel = () => {
   );
 
   const handleAddModel = (llmFactory: string) => () => {
-    if (llmFactory === 'Ollama') {
-      showLlmAddingModal();
+    if (isLocalLlmFactory(llmFactory)) {
+      showLlmAddingModal(llmFactory);
     } else {
       handleApiKeyClick(llmFactory);
     }
@@ -252,6 +255,7 @@ const UserSettingModel = () => {
         hideModal={hideLlmAddingModal}
         onOk={onLlmAddingOk}
         loading={llmAddingLoading}
+        llmFactory={selectedLlmFactory}
       ></OllamaModal>
     </>
   );
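A small review note on the two call sites above: both `handleApiKeyClick` and `handleAddModel` now branch on `isLocalLlmFactory`, so wiring up another local provider later should mostly mean extending `LocalLlmFactories` (plus an `IconMap` entry and, per the modal below, a matching docs page); presumably no further edits to this component would be needed.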
@@ -13,7 +13,8 @@ const OllamaModal = ({
   hideModal,
   onOk,
   loading,
-}: IModalProps<IAddLlmRequestBody>) => {
+  llmFactory,
+}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
   const [form] = Form.useForm<FieldType>();
 
   const { t } = useTranslate('setting');
@@ -28,7 +29,7 @@ const OllamaModal = ({
     const data = {
       ...omit(values, ['vision']),
       model_type: modelType,
-      llm_factory: 'Ollama',
+      llm_factory: llmFactory,
     };
     console.info(data);
 
@@ -37,7 +38,7 @@ const OllamaModal = ({
 
   return (
     <Modal
-      title={t('addLlmTitle')}
+      title={t('addLlmTitle', { name: llmFactory })}
       open={visible}
       onOk={handleOk}
       onCancel={hideModal}
@@ -46,11 +47,11 @@ const OllamaModal = ({
       return (
         <Flex justify={'space-between'}>
           <a
-            href="https://github.com/infiniflow/ragflow/blob/main/docs/ollama.md"
+            href={`https://github.com/infiniflow/ragflow/blob/main/docs/${llmFactory.toLowerCase()}.md`}
            target="_blank"
            rel="noreferrer"
          >
-            {t('ollamaLink')}
+            {t('ollamaLink', { name: llmFactory })}
          </a>
          <Space>{originNode}</Space>
        </Flex>
@@ -76,7 +77,7 @@ const OllamaModal = ({
         </Select>
       </Form.Item>
       <Form.Item<FieldType>
-        label={t('modelName')}
+        label={t(llmFactory === 'Xinference' ? 'modelUid' : 'modelName')}
         name="llm_name"
         rules={[{ required: true, message: t('modelNameMessage') }]}
       >
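One assumption the modal now encodes: the help link is built as `docs/${llmFactory.toLowerCase()}.md`, so it relies on a `docs/xinference.md` existing in the repo alongside `docs/ollama.md`; worth verifying before merge.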
web/src/pages/user-setting/utils.ts (new file, +4)
@@ -0,0 +1,4 @@
+import { LocalLlmFactories } from './constants';
+
+export const isLocalLlmFactory = (llmFactory: string) =>
+  LocalLlmFactories.some((x) => x === llmFactory);
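Usage of the new helper is a plain membership test; `LocalLlmFactories.includes(llmFactory)` would be an equivalent alternative to the `some` form chosen here. A quick check (the `@/` import alias is assumed from the project's existing `@/constants/setting` import):

```ts
import { isLocalLlmFactory } from '@/pages/user-setting/utils';

// 'Ollama' and 'Xinference' take the local add-model flow; other factories
// keep the API-Key flow.
console.assert(isLocalLlmFactory('Xinference'));
console.assert(!isLocalLlmFactory('ZHIPU-AI'));
```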