feat: support Xinference (#319)

### What problem does this PR solve?

Support Xorbits Inference (Xinference) as a model provider.

Issue link: #299

### Type of change


- [x] New Feature (non-breaking change which adds functionality)
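
With this change, Ollama and Xinference are both treated as "local" LLM factories: they share the same add-model modal instead of the API-key flow, and the selected factory name is threaded through to the request. A sketch of the payload the modal now submits (field names taken from the OllamaModal diff below; the `llm_name` value is a hypothetical Xinference model UID, and `model_type: 'chat'` is an assumed example):

```ts
// Hedged sketch -- 'my-model-uid' is a placeholder, not a value from this PR.
const data = {
  llm_factory: 'Xinference', // was hard-coded to 'Ollama' before this PR
  model_type: 'chat',
  llm_name: 'my-model-uid', // Xinference identifies deployed models by UID
};
```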
balibabu authored 2024-04-11 18:17:45 +08:00, committed by GitHub
commit cb2cbf500c (parent 4fa768e733)
9 changed files with 77 additions and 19 deletions


@@ -14,3 +14,5 @@ export const UserSettingIconMap = {
 };
 
 export * from '@/constants/setting';
+
+export const LocalLlmFactories = ['Ollama', 'Xinference'];
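
This list is the single source of truth for which factories use the local add-model flow. Adding another local provider later would presumably be a one-line change here:

```ts
// Hypothetical extension -- 'LocalAI' is NOT part of this PR:
export const LocalLlmFactories = ['Ollama', 'Xinference', 'LocalAI'];
```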


@@ -132,6 +132,7 @@ export const useSelectModelProvidersLoading = () => {
 export const useSubmitOllama = () => {
   const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
+  const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
   const addLlm = useAddLlm();
   const {
     visible: llmAddingVisible,
@@ -149,11 +150,17 @@ export const useSubmitOllama = () => {
     [hideLlmAddingModal, addLlm],
   );
 
+  const handleShowLlmAddingModal = (llmFactory: string) => {
+    setSelectedLlmFactory(llmFactory);
+    showLlmAddingModal();
+  };
+
   return {
     llmAddingLoading: loading,
     onLlmAddingOk,
     llmAddingVisible,
     hideLlmAddingModal,
-    showLlmAddingModal,
+    showLlmAddingModal: handleShowLlmAddingModal,
+    selectedLlmFactory,
   };
 };
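
A usage sketch of the reworked hook (hook and field names from the diff above; component context assumed):

```ts
const { showLlmAddingModal, selectedLlmFactory } = useSubmitOllama();

// The exposed opener now takes the factory name and remembers it in state,
// so OllamaModal can later submit with the matching llm_factory:
showLlmAddingModal('Xinference'); // selectedLlmFactory === 'Xinference'
```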


@@ -25,6 +25,7 @@ import {
 } from 'antd';
 import { useCallback } from 'react';
 import SettingTitle from '../components/setting-title';
+import { isLocalLlmFactory } from '../utils';
 import ApiKeyModal from './api-key-modal';
 import {
   useSelectModelProvidersLoading,
@@ -43,6 +44,7 @@ const IconMap = {
   'ZHIPU-AI': 'zhipu',
   '文心一言': 'wenxin',
   Ollama: 'ollama',
+  Xinference: 'xinference',
 };
 
 const LlmIcon = ({ name }: { name: string }) => {
@@ -89,7 +91,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
       <Col span={12} className={styles.factoryOperationWrapper}>
         <Space size={'middle'}>
           <Button onClick={handleApiKeyClick}>
-            {item.name === 'Ollama' ? t('addTheModel') : 'API-Key'}
+            {isLocalLlmFactory(item.name) ? t('addTheModel') : 'API-Key'}
             <SettingOutlined />
           </Button>
           <Button onClick={handleShowMoreClick}>
@@ -147,12 +149,13 @@ const UserSettingModel = () => {
     showLlmAddingModal,
     onLlmAddingOk,
     llmAddingLoading,
+    selectedLlmFactory,
   } = useSubmitOllama();
 
   const handleApiKeyClick = useCallback(
     (llmFactory: string) => {
-      if (llmFactory === 'Ollama') {
-        showLlmAddingModal();
+      if (isLocalLlmFactory(llmFactory)) {
+        showLlmAddingModal(llmFactory);
       } else {
         showApiKeyModal({ llm_factory: llmFactory });
       }
@@ -161,8 +164,8 @@ const UserSettingModel = () => {
   );
 
   const handleAddModel = (llmFactory: string) => () => {
-    if (llmFactory === 'Ollama') {
-      showLlmAddingModal();
+    if (isLocalLlmFactory(llmFactory)) {
+      showLlmAddingModal(llmFactory);
     } else {
       handleApiKeyClick(llmFactory);
     }
@@ -252,6 +255,7 @@ const UserSettingModel = () => {
         hideModal={hideLlmAddingModal}
         onOk={onLlmAddingOk}
         loading={llmAddingLoading}
+        llmFactory={selectedLlmFactory}
       ></OllamaModal>
     </>
   );
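
Taken together, the card button and the add-model handler now dispatch on the factory list rather than a hard-coded 'Ollama' check. A sketch of the resulting behavior (both factory names appear in the diffs above; 'ZHIPU-AI' stands in for any remote, API-key factory):

```ts
handleAddModel('Xinference')(); // local  -> showLlmAddingModal('Xinference')
handleAddModel('ZHIPU-AI')();   // remote -> handleApiKeyClick -> API-key modal
```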


@@ -13,7 +13,8 @@ const OllamaModal = ({
   hideModal,
   onOk,
   loading,
-}: IModalProps<IAddLlmRequestBody>) => {
+  llmFactory,
+}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
   const [form] = Form.useForm<FieldType>();
   const { t } = useTranslate('setting');
 
@@ -28,7 +29,7 @@ const OllamaModal = ({
       const data = {
         ...omit(values, ['vision']),
         model_type: modelType,
-        llm_factory: 'Ollama',
+        llm_factory: llmFactory,
       };
 
       console.info(data);
@@ -37,7 +38,7 @@ const OllamaModal = ({
 
   return (
     <Modal
-      title={t('addLlmTitle')}
+      title={t('addLlmTitle', { name: llmFactory })}
      open={visible}
      onOk={handleOk}
      onCancel={hideModal}
@@ -46,11 +47,11 @@ const OllamaModal = ({
         return (
           <Flex justify={'space-between'}>
             <a
-              href="https://github.com/infiniflow/ragflow/blob/main/docs/ollama.md"
+              href={`https://github.com/infiniflow/ragflow/blob/main/docs/${llmFactory.toLowerCase()}.md`}
               target="_blank"
               rel="noreferrer"
             >
-              {t('ollamaLink')}
+              {t('ollamaLink', { name: llmFactory })}
             </a>
             <Space>{originNode}</Space>
           </Flex>
@@ -76,7 +77,7 @@ const OllamaModal = ({
         </Select>
       </Form.Item>
       <Form.Item<FieldType>
-        label={t('modelName')}
+        label={t(llmFactory === 'Xinference' ? 'modelUid' : 'modelName')}
         name="llm_name"
         rules={[{ required: true, message: t('modelNameMessage') }]}
       >
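
The help link is now derived from the factory name instead of pointing only at the Ollama doc. A sketch of the derivation (same template literal as the diff above; it assumes a matching markdown doc exists for every local factory):

```ts
const docsUrl = (llmFactory: string) =>
  `https://github.com/infiniflow/ragflow/blob/main/docs/${llmFactory.toLowerCase()}.md`;

docsUrl('Ollama');     // .../docs/ollama.md
docsUrl('Xinference'); // .../docs/xinference.md
```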


@@ -0,0 +1,4 @@
+import { LocalLlmFactories } from './constants';
+
+export const isLocalLlmFactory = (llmFactory: string) =>
+  LocalLlmFactories.some((x) => x === llmFactory);
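
Design note: the `some` comparison is equivalent to `Array.prototype.includes` on a string array, so this one-liner would behave identically:

```ts
export const isLocalLlmFactory = (llmFactory: string) =>
  LocalLlmFactories.includes(llmFactory);
```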