balibabu
committed on
Commit
·
e693841
1
Parent(s):
4ba8b01
feat: support Xinference (#319)
Browse files

### What problem does this PR solve?
support xorbitsai inference as model provider
Issue link: #299
### Type of change
- [x] New Feature (non-breaking change which adds functionality)
- web/src/assets/svg/enable.svg +4 -5
- web/src/assets/svg/llm/xinference.svg +39 -0
- web/src/locales/en.ts +2 -1
- web/src/locales/zh.ts +2 -1
- web/src/pages/user-setting/constants.tsx +2 -0
- web/src/pages/user-setting/setting-model/hooks.ts +8 -1
- web/src/pages/user-setting/setting-model/index.tsx +9 -5
- web/src/pages/user-setting/setting-model/ollama-modal/index.tsx +7 -6
- web/src/pages/user-setting/utils.ts +4 -0
web/src/assets/svg/enable.svg
CHANGED
|
|
web/src/assets/svg/llm/xinference.svg
ADDED
|
web/src/locales/en.ts
CHANGED
@@ -397,13 +397,14 @@ export default {
|
|
397 |
upgrade: 'Upgrade',
|
398 |
addLlmTitle: 'Add LLM',
|
399 |
modelName: 'Model name',
|
|
|
400 |
modelNameMessage: 'Please input your model name!',
|
401 |
modelType: 'Model type',
|
402 |
modelTypeMessage: 'Please input your model type!',
|
403 |
addLlmBaseUrl: 'Base url',
|
404 |
baseUrlNameMessage: 'Please input your base url!',
|
405 |
vision: 'Does it support Vision?',
|
406 |
-
ollamaLink: 'How to integrate Ollama',
|
407 |
},
|
408 |
message: {
|
409 |
registered: 'Registered!',
|
|
|
397 |
upgrade: 'Upgrade',
|
398 |
addLlmTitle: 'Add LLM',
|
399 |
modelName: 'Model name',
|
400 |
+
modelUid: 'Model UID',
|
401 |
modelNameMessage: 'Please input your model name!',
|
402 |
modelType: 'Model type',
|
403 |
modelTypeMessage: 'Please input your model type!',
|
404 |
addLlmBaseUrl: 'Base url',
|
405 |
baseUrlNameMessage: 'Please input your base url!',
|
406 |
vision: 'Does it support Vision?',
|
407 |
+
ollamaLink: 'How to integrate {{name}}',
|
408 |
},
|
409 |
message: {
|
410 |
registered: 'Registered!',
|
web/src/locales/zh.ts
CHANGED
@@ -382,13 +382,14 @@ export default {
|
|
382 |
upgrade: '升级',
|
383 |
addLlmTitle: '添加 LLM',
|
384 |
modelName: '模型名称',
|
|
|
385 |
modelType: '模型类型',
|
386 |
addLlmBaseUrl: '基础 Url',
|
387 |
vision: '是否支持 Vision',
|
388 |
modelNameMessage: '请输入模型名称!',
|
389 |
modelTypeMessage: '请输入模型类型!',
|
390 |
baseUrlNameMessage: '请输入基础 Url!',
|
391 |
-
ollamaLink: '如何集成 Ollama',
|
392 |
},
|
393 |
message: {
|
394 |
registered: '注册成功',
|
|
|
382 |
upgrade: '升级',
|
383 |
addLlmTitle: '添加 LLM',
|
384 |
modelName: '模型名称',
|
385 |
+
modelUid: '模型UID',
|
386 |
modelType: '模型类型',
|
387 |
addLlmBaseUrl: '基础 Url',
|
388 |
vision: '是否支持 Vision',
|
389 |
modelNameMessage: '请输入模型名称!',
|
390 |
modelTypeMessage: '请输入模型类型!',
|
391 |
baseUrlNameMessage: '请输入基础 Url!',
|
392 |
+
ollamaLink: '如何集成 {{name}}',
|
393 |
},
|
394 |
message: {
|
395 |
registered: '注册成功',
|
web/src/pages/user-setting/constants.tsx
CHANGED
@@ -14,3 +14,5 @@ export const UserSettingIconMap = {
|
|
14 |
};
|
15 |
|
16 |
export * from '@/constants/setting';
|
|
|
|
|
|
14 |
};
|
15 |
|
16 |
export * from '@/constants/setting';
|
17 |
+
|
18 |
+
export const LocalLlmFactories = ['Ollama', 'Xinference'];
|
web/src/pages/user-setting/setting-model/hooks.ts
CHANGED
@@ -132,6 +132,7 @@ export const useSelectModelProvidersLoading = () => {
|
|
132 |
|
133 |
export const useSubmitOllama = () => {
|
134 |
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
|
|
|
135 |
const addLlm = useAddLlm();
|
136 |
const {
|
137 |
visible: llmAddingVisible,
|
@@ -149,11 +150,17 @@ export const useSubmitOllama = () => {
|
|
149 |
[hideLlmAddingModal, addLlm],
|
150 |
);
|
151 |
|
|
|
|
|
|
|
|
|
|
|
152 |
return {
|
153 |
llmAddingLoading: loading,
|
154 |
onLlmAddingOk,
|
155 |
llmAddingVisible,
|
156 |
hideLlmAddingModal,
|
157 |
-
showLlmAddingModal,
|
|
|
158 |
};
|
159 |
};
|
|
|
132 |
|
133 |
export const useSubmitOllama = () => {
|
134 |
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
|
135 |
+
const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
|
136 |
const addLlm = useAddLlm();
|
137 |
const {
|
138 |
visible: llmAddingVisible,
|
|
|
150 |
[hideLlmAddingModal, addLlm],
|
151 |
);
|
152 |
|
153 |
+
const handleShowLlmAddingModal = (llmFactory: string) => {
|
154 |
+
setSelectedLlmFactory(llmFactory);
|
155 |
+
showLlmAddingModal();
|
156 |
+
};
|
157 |
+
|
158 |
return {
|
159 |
llmAddingLoading: loading,
|
160 |
onLlmAddingOk,
|
161 |
llmAddingVisible,
|
162 |
hideLlmAddingModal,
|
163 |
+
showLlmAddingModal: handleShowLlmAddingModal,
|
164 |
+
selectedLlmFactory,
|
165 |
};
|
166 |
};
|
web/src/pages/user-setting/setting-model/index.tsx
CHANGED
@@ -25,6 +25,7 @@ import {
|
|
25 |
} from 'antd';
|
26 |
import { useCallback } from 'react';
|
27 |
import SettingTitle from '../components/setting-title';
|
|
|
28 |
import ApiKeyModal from './api-key-modal';
|
29 |
import {
|
30 |
useSelectModelProvidersLoading,
|
@@ -43,6 +44,7 @@ const IconMap = {
|
|
43 |
'ZHIPU-AI': 'zhipu',
|
44 |
文心一言: 'wenxin',
|
45 |
Ollama: 'ollama',
|
|
|
46 |
};
|
47 |
|
48 |
const LlmIcon = ({ name }: { name: string }) => {
|
@@ -89,7 +91,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
|
|
89 |
<Col span={12} className={styles.factoryOperationWrapper}>
|
90 |
<Space size={'middle'}>
|
91 |
<Button onClick={handleApiKeyClick}>
|
92 |
-
{item.name === 'Ollama' ? t('addTheModel') : 'API-Key'}
|
93 |
<SettingOutlined />
|
94 |
</Button>
|
95 |
<Button onClick={handleShowMoreClick}>
|
@@ -147,12 +149,13 @@ const UserSettingModel = () => {
|
|
147 |
showLlmAddingModal,
|
148 |
onLlmAddingOk,
|
149 |
llmAddingLoading,
|
|
|
150 |
} = useSubmitOllama();
|
151 |
|
152 |
const handleApiKeyClick = useCallback(
|
153 |
(llmFactory: string) => {
|
154 |
-
if (llmFactory === 'Ollama') {
|
155 |
-
showLlmAddingModal();
|
156 |
} else {
|
157 |
showApiKeyModal({ llm_factory: llmFactory });
|
158 |
}
|
@@ -161,8 +164,8 @@ const UserSettingModel = () => {
|
|
161 |
);
|
162 |
|
163 |
const handleAddModel = (llmFactory: string) => () => {
|
164 |
-
if (llmFactory === 'Ollama') {
|
165 |
-
showLlmAddingModal();
|
166 |
} else {
|
167 |
handleApiKeyClick(llmFactory);
|
168 |
}
|
@@ -252,6 +255,7 @@ const UserSettingModel = () => {
|
|
252 |
hideModal={hideLlmAddingModal}
|
253 |
onOk={onLlmAddingOk}
|
254 |
loading={llmAddingLoading}
|
|
|
255 |
></OllamaModal>
|
256 |
</>
|
257 |
);
|
|
|
25 |
} from 'antd';
|
26 |
import { useCallback } from 'react';
|
27 |
import SettingTitle from '../components/setting-title';
|
28 |
+
import { isLocalLlmFactory } from '../utils';
|
29 |
import ApiKeyModal from './api-key-modal';
|
30 |
import {
|
31 |
useSelectModelProvidersLoading,
|
|
|
44 |
'ZHIPU-AI': 'zhipu',
|
45 |
文心一言: 'wenxin',
|
46 |
Ollama: 'ollama',
|
47 |
+
Xinference: 'xinference',
|
48 |
};
|
49 |
|
50 |
const LlmIcon = ({ name }: { name: string }) => {
|
|
|
91 |
<Col span={12} className={styles.factoryOperationWrapper}>
|
92 |
<Space size={'middle'}>
|
93 |
<Button onClick={handleApiKeyClick}>
|
94 |
+
{isLocalLlmFactory(item.name) ? t('addTheModel') : 'API-Key'}
|
95 |
<SettingOutlined />
|
96 |
</Button>
|
97 |
<Button onClick={handleShowMoreClick}>
|
|
|
149 |
showLlmAddingModal,
|
150 |
onLlmAddingOk,
|
151 |
llmAddingLoading,
|
152 |
+
selectedLlmFactory,
|
153 |
} = useSubmitOllama();
|
154 |
|
155 |
const handleApiKeyClick = useCallback(
|
156 |
(llmFactory: string) => {
|
157 |
+
if (isLocalLlmFactory(llmFactory)) {
|
158 |
+
showLlmAddingModal(llmFactory);
|
159 |
} else {
|
160 |
showApiKeyModal({ llm_factory: llmFactory });
|
161 |
}
|
|
|
164 |
);
|
165 |
|
166 |
const handleAddModel = (llmFactory: string) => () => {
|
167 |
+
if (isLocalLlmFactory(llmFactory)) {
|
168 |
+
showLlmAddingModal(llmFactory);
|
169 |
} else {
|
170 |
handleApiKeyClick(llmFactory);
|
171 |
}
|
|
|
255 |
hideModal={hideLlmAddingModal}
|
256 |
onOk={onLlmAddingOk}
|
257 |
loading={llmAddingLoading}
|
258 |
+
llmFactory={selectedLlmFactory}
|
259 |
></OllamaModal>
|
260 |
</>
|
261 |
);
|
web/src/pages/user-setting/setting-model/ollama-modal/index.tsx
CHANGED
@@ -13,7 +13,8 @@ const OllamaModal = ({
|
|
13 |
hideModal,
|
14 |
onOk,
|
15 |
loading,
|
16 |
-
|
|
|
17 |
const [form] = Form.useForm<FieldType>();
|
18 |
|
19 |
const { t } = useTranslate('setting');
|
@@ -28,7 +29,7 @@ const OllamaModal = ({
|
|
28 |
const data = {
|
29 |
...omit(values, ['vision']),
|
30 |
model_type: modelType,
|
31 |
-
llm_factory: 'Ollama',
|
32 |
};
|
33 |
console.info(data);
|
34 |
|
@@ -37,7 +38,7 @@ const OllamaModal = ({
|
|
37 |
|
38 |
return (
|
39 |
<Modal
|
40 |
-
title={t('addLlmTitle')}
|
41 |
open={visible}
|
42 |
onOk={handleOk}
|
43 |
onCancel={hideModal}
|
@@ -46,11 +47,11 @@ const OllamaModal = ({
|
|
46 |
return (
|
47 |
<Flex justify={'space-between'}>
|
48 |
<a
|
49 |
-
href="https://github.com/infiniflow/ragflow/blob/main/docs/ollama.md"
|
50 |
target="_blank"
|
51 |
rel="noreferrer"
|
52 |
>
|
53 |
-
{t('ollamaLink')}
|
54 |
</a>
|
55 |
<Space>{originNode}</Space>
|
56 |
</Flex>
|
@@ -76,7 +77,7 @@ const OllamaModal = ({
|
|
76 |
</Select>
|
77 |
</Form.Item>
|
78 |
<Form.Item<FieldType>
|
79 |
-
label={t('modelName')}
|
80 |
name="llm_name"
|
81 |
rules={[{ required: true, message: t('modelNameMessage') }]}
|
82 |
>
|
|
|
13 |
hideModal,
|
14 |
onOk,
|
15 |
loading,
|
16 |
+
llmFactory,
|
17 |
+
}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
|
18 |
const [form] = Form.useForm<FieldType>();
|
19 |
|
20 |
const { t } = useTranslate('setting');
|
|
|
29 |
const data = {
|
30 |
...omit(values, ['vision']),
|
31 |
model_type: modelType,
|
32 |
+
llm_factory: llmFactory,
|
33 |
};
|
34 |
console.info(data);
|
35 |
|
|
|
38 |
|
39 |
return (
|
40 |
<Modal
|
41 |
+
title={t('addLlmTitle', { name: llmFactory })}
|
42 |
open={visible}
|
43 |
onOk={handleOk}
|
44 |
onCancel={hideModal}
|
|
|
47 |
return (
|
48 |
<Flex justify={'space-between'}>
|
49 |
<a
|
50 |
+
href={`https://github.com/infiniflow/ragflow/blob/main/docs/${llmFactory.toLowerCase()}.md`}
|
51 |
target="_blank"
|
52 |
rel="noreferrer"
|
53 |
>
|
54 |
+
{t('ollamaLink', { name: llmFactory })}
|
55 |
</a>
|
56 |
<Space>{originNode}</Space>
|
57 |
</Flex>
|
|
|
77 |
</Select>
|
78 |
</Form.Item>
|
79 |
<Form.Item<FieldType>
|
80 |
+
label={t(llmFactory === 'Xinference' ? 'modelUid' : 'modelName')}
|
81 |
name="llm_name"
|
82 |
rules={[{ required: true, message: t('modelNameMessage') }]}
|
83 |
>
|
web/src/pages/user-setting/utils.ts
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { LocalLlmFactories } from './constants';
|
2 |
+
|
3 |
+
export const isLocalLlmFactory = (llmFactory: string) =>
|
4 |
+
LocalLlmFactories.some((x) => x === llmFactory);
|