黄腾 aopstudio committed on
Commit
efaa250
·
1 Parent(s): 584001f

add support for Tencent Hunyuan (#2015)

Browse files

### What problem does this PR solve?

#1853

### Type of change


- [X] New Feature (non-breaking change which adds functionality)

Co-authored-by: Zhedong Cen <[email protected]>

api/apps/llm_app.py CHANGED
@@ -106,7 +106,7 @@ def set_api_key():
106
 
107
  @manager.route('/add_llm', methods=['POST'])
108
  @login_required
109
- @validate_request("llm_factory", "llm_name", "model_type")
110
  def add_llm():
111
  req = request.json
112
  factory = req["llm_factory"]
@@ -120,6 +120,11 @@ def add_llm():
120
  api_key = '{' + f'"volc_ak": "{req.get("volc_ak", "")}", ' \
121
  f'"volc_sk": "{req.get("volc_sk", "")}", ' \
122
  f'"ep_id": "{endpoint_id}", ' + '}'
 
 
 
 
 
123
  elif factory == "Bedrock":
124
  # For Bedrock, due to its special authentication method
125
  # Assemble bedrock_ak, bedrock_sk, bedrock_region
@@ -132,7 +137,7 @@ def add_llm():
132
  api_key = "xxxxxxxxxxxxxxx"
133
  elif factory == "OpenAI-API-Compatible":
134
  llm_name = req["llm_name"]+"___OpenAI-API"
135
- api_key = req.get("api_key","xxxxxxxxxxxxxxx")
136
  else:
137
  llm_name = req["llm_name"]
138
  api_key = req.get("api_key","xxxxxxxxxxxxxxx")
 
106
 
107
  @manager.route('/add_llm', methods=['POST'])
108
  @login_required
109
+ @validate_request("llm_factory")
110
  def add_llm():
111
  req = request.json
112
  factory = req["llm_factory"]
 
120
  api_key = '{' + f'"volc_ak": "{req.get("volc_ak", "")}", ' \
121
  f'"volc_sk": "{req.get("volc_sk", "")}", ' \
122
  f'"ep_id": "{endpoint_id}", ' + '}'
123
+ elif factory == "Tencent Hunyuan":
124
+ api_key = '{' + f'"hunyuan_sid": "{req.get("hunyuan_sid", "")}", ' \
125
+ f'"hunyuan_sk": "{req.get("hunyuan_sk", "")}"' + '}'
126
+ req["api_key"] = api_key
127
+ return set_api_key()
128
  elif factory == "Bedrock":
129
  # For Bedrock, due to its special authentication method
130
  # Assemble bedrock_ak, bedrock_sk, bedrock_region
 
137
  api_key = "xxxxxxxxxxxxxxx"
138
  elif factory == "OpenAI-API-Compatible":
139
  llm_name = req["llm_name"]+"___OpenAI-API"
140
+ api_key = req.get("api_key","xxxxxxxxxxxxxxx")
141
  else:
142
  llm_name = req["llm_name"]
143
  api_key = req.get("api_key","xxxxxxxxxxxxxxx")
conf/llm_factories.json CHANGED
@@ -3156,6 +3156,44 @@
3156
  "tags": "LLM,TEXT EMBEDDING",
3157
  "status": "1",
3158
  "llm": []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3159
  }
3160
  ]
3161
  }
 
3156
  "tags": "LLM,TEXT EMBEDDING",
3157
  "status": "1",
3158
  "llm": []
3159
+ },
3160
+ {
3161
+ "name": "Tencent Hunyuan",
3162
+ "logo": "",
3163
+ "tags": "LLM,IMAGE2TEXT",
3164
+ "status": "1",
3165
+ "llm": [
3166
+ {
3167
+ "llm_name": "hunyuan-pro",
3168
+ "tags": "LLM,CHAT,32k",
3169
+ "max_tokens": 32768,
3170
+ "model_type": "chat"
3171
+ },
3172
+ {
3173
+ "llm_name": "hunyuan-standard",
3174
+ "tags": "LLM,CHAT,32k",
3175
+ "max_tokens": 32768,
3176
+ "model_type": "chat"
3177
+ },
3178
+ {
3179
+ "llm_name": "hunyuan-standard-256K",
3180
+ "tags": "LLM,CHAT,256k",
3181
+ "max_tokens": 262144,
3182
+ "model_type": "chat"
3183
+ },
3184
+ {
3185
+ "llm_name": "hunyuan-lite",
3186
+ "tags": "LLM,CHAT,256k",
3187
+ "max_tokens": 262144,
3188
+ "model_type": "chat"
3189
+ },
3190
+ {
3191
+ "llm_name": "hunyuan-vision",
3192
+ "tags": "LLM,IMAGE2TEXT,8k",
3193
+ "max_tokens": 8192,
3194
+ "model_type": "image2text"
3195
+ }
3196
+ ]
3197
  }
3198
  ]
3199
  }
rag/llm/__init__.py CHANGED
@@ -63,7 +63,8 @@ CvModel = {
63
  "StepFun":StepFunCV,
64
  "OpenAI-API-Compatible": OpenAI_APICV,
65
  "TogetherAI": TogetherAICV,
66
- "01.AI": YiCV
 
67
  }
68
 
69
 
@@ -98,7 +99,8 @@ ChatModel = {
98
  "novita.ai": NovitaAIChat,
99
  "SILICONFLOW": SILICONFLOWChat,
100
  "01.AI": YiChat,
101
- "Replicate": ReplicateChat
 
102
  }
103
 
104
 
 
63
  "StepFun":StepFunCV,
64
  "OpenAI-API-Compatible": OpenAI_APICV,
65
  "TogetherAI": TogetherAICV,
66
+ "01.AI": YiCV,
67
+ "Tencent Hunyuan": HunyuanCV
68
  }
69
 
70
 
 
99
  "novita.ai": NovitaAIChat,
100
  "SILICONFLOW": SILICONFLOWChat,
101
  "01.AI": YiChat,
102
+ "Replicate": ReplicateChat,
103
+ "Tencent Hunyuan": HunyuanChat
104
  }
105
 
106
 
rag/llm/chat_model.py CHANGED
@@ -1088,3 +1088,83 @@ class ReplicateChat(Base):
1088
  yield ans + "\n**ERROR**: " + str(e)
1089
 
1090
  yield num_tokens_from_string(ans)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1088
  yield ans + "\n**ERROR**: " + str(e)
1089
 
1090
  yield num_tokens_from_string(ans)
1091
+
1092
+
1093
+ class HunyuanChat(Base):
1094
+ def __init__(self, key, model_name, base_url=None):
1095
+ from tencentcloud.common import credential
1096
+ from tencentcloud.hunyuan.v20230901 import hunyuan_client
1097
+
1098
+ key = json.loads(key)
1099
+ sid = key.get("hunyuan_sid", "")
1100
+ sk = key.get("hunyuan_sk", "")
1101
+ cred = credential.Credential(sid, sk)
1102
+ self.model_name = model_name
1103
+ self.client = hunyuan_client.HunyuanClient(cred, "")
1104
+
1105
+ def chat(self, system, history, gen_conf):
1106
+ from tencentcloud.hunyuan.v20230901 import models
1107
+ from tencentcloud.common.exception.tencent_cloud_sdk_exception import (
1108
+ TencentCloudSDKException,
1109
+ )
1110
+
1111
+ _gen_conf = {}
1112
+ _history = [{k.capitalize(): v for k, v in item.items() } for item in history]
1113
+ if system:
1114
+ _history.insert(0, {"Role": "system", "Content": system})
1115
+ if "temperature" in gen_conf:
1116
+ _gen_conf["Temperature"] = gen_conf["temperature"]
1117
+ if "top_p" in gen_conf:
1118
+ _gen_conf["TopP"] = gen_conf["top_p"]
1119
+
1120
+ req = models.ChatCompletionsRequest()
1121
+ params = {"Model": self.model_name, "Messages": _history, **_gen_conf}
1122
+ req.from_json_string(json.dumps(params))
1123
+ ans = ""
1124
+ try:
1125
+ response = self.client.ChatCompletions(req)
1126
+ ans = response.Choices[0].Message.Content
1127
+ return ans, response.Usage.TotalTokens
1128
+ except TencentCloudSDKException as e:
1129
+ return ans + "\n**ERROR**: " + str(e), 0
1130
+
1131
+ def chat_streamly(self, system, history, gen_conf):
1132
+ from tencentcloud.hunyuan.v20230901 import models
1133
+ from tencentcloud.common.exception.tencent_cloud_sdk_exception import (
1134
+ TencentCloudSDKException,
1135
+ )
1136
+
1137
+ _gen_conf = {}
1138
+ _history = [{k.capitalize(): v for k, v in item.items() } for item in history]
1139
+ if system:
1140
+ _history.insert(0, {"Role": "system", "Content": system})
1141
+
1142
+ if "temperature" in gen_conf:
1143
+ _gen_conf["Temperature"] = gen_conf["temperature"]
1144
+ if "top_p" in gen_conf:
1145
+ _gen_conf["TopP"] = gen_conf["top_p"]
1146
+ req = models.ChatCompletionsRequest()
1147
+ params = {
1148
+ "Model": self.model_name,
1149
+ "Messages": _history,
1150
+ "Stream": True,
1151
+ **_gen_conf,
1152
+ }
1153
+ req.from_json_string(json.dumps(params))
1154
+ ans = ""
1155
+ total_tokens = 0
1156
+ try:
1157
+ response = self.client.ChatCompletions(req)
1158
+ for resp in response:
1159
+ resp = json.loads(resp["data"])
1160
+ if not resp["Choices"] or not resp["Choices"][0]["Delta"]["Content"]:
1161
+ continue
1162
+ ans += resp["Choices"][0]["Delta"]["Content"]
1163
+ total_tokens += 1
1164
+
1165
+ yield ans
1166
+
1167
+ except TencentCloudSDKException as e:
1168
+ yield ans + "\n**ERROR**: " + str(e)
1169
+
1170
+ yield total_tokens
rag/llm/cv_model.py CHANGED
@@ -664,4 +664,56 @@ class YiCV(GptV4):
664
  def __init__(self, key, model_name, lang="Chinese",base_url="https://api.lingyiwanwu.com/v1",):
665
  if not base_url:
666
  base_url = "https://api.lingyiwanwu.com/v1"
667
- super().__init__(key, model_name,lang,base_url)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
664
  def __init__(self, key, model_name, lang="Chinese",base_url="https://api.lingyiwanwu.com/v1",):
665
  if not base_url:
666
  base_url = "https://api.lingyiwanwu.com/v1"
667
+ super().__init__(key, model_name,lang,base_url)
668
+
669
+
670
+ class HunyuanCV(Base):
671
+ def __init__(self, key, model_name, lang="Chinese",base_url=None):
672
+ from tencentcloud.common import credential
673
+ from tencentcloud.hunyuan.v20230901 import hunyuan_client
674
+
675
+ key = json.loads(key)
676
+ sid = key.get("hunyuan_sid", "")
677
+ sk = key.get("hunyuan_sk", "")
678
+ cred = credential.Credential(sid, sk)
679
+ self.model_name = model_name
680
+ self.client = hunyuan_client.HunyuanClient(cred, "")
681
+ self.lang = lang
682
+
683
+ def describe(self, image, max_tokens=4096):
684
+ from tencentcloud.hunyuan.v20230901 import models
685
+ from tencentcloud.common.exception.tencent_cloud_sdk_exception import (
686
+ TencentCloudSDKException,
687
+ )
688
+
689
+ b64 = self.image2base64(image)
690
+ req = models.ChatCompletionsRequest()
691
+ params = {"Model": self.model_name, "Messages": self.prompt(b64)}
692
+ req.from_json_string(json.dumps(params))
693
+ ans = ""
694
+ try:
695
+ response = self.client.ChatCompletions(req)
696
+ ans = response.Choices[0].Message.Content
697
+ return ans, response.Usage.TotalTokens
698
+ except TencentCloudSDKException as e:
699
+ return ans + "\n**ERROR**: " + str(e), 0
700
+
701
+ def prompt(self, b64):
702
+ return [
703
+ {
704
+ "Role": "user",
705
+ "Contents": [
706
+ {
707
+ "Type": "image_url",
708
+ "ImageUrl": {
709
+ "Url": f"data:image/jpeg;base64,{b64}"
710
+ },
711
+ },
712
+ {
713
+ "Type": "text",
714
+ "Text": "请用中文详细描述一下图中的内容,比如时间,地点,人物,事情,人物心情等,如果有数据请提取出数据。" if self.lang.lower() == "chinese" else
715
+ "Please describe the content of this picture, like where, when, who, what happen. If it has number data, please extract them out.",
716
+ },
717
+ ],
718
+ }
719
+ ]
requirements.txt CHANGED
@@ -76,6 +76,7 @@ Shapely==2.0.5
76
  six==1.16.0
77
  StrEnum==0.4.15
78
  tabulate==0.9.0
 
79
  tika==2.6.0
80
  tiktoken==0.6.0
81
  torch==2.3.0
 
76
  six==1.16.0
77
  StrEnum==0.4.15
78
  tabulate==0.9.0
79
+ tencentcloud-sdk-python==3.0.1215
80
  tika==2.6.0
81
  tiktoken==0.6.0
82
  torch==2.3.0
requirements_arm.txt CHANGED
@@ -115,6 +115,7 @@ six==1.16.0
115
  sniffio==1.3.1
116
  StrEnum==0.4.15
117
  sympy==1.12
 
118
  threadpoolctl==3.3.0
119
  tika==2.6.0
120
  tiktoken==0.6.0
 
115
  sniffio==1.3.1
116
  StrEnum==0.4.15
117
  sympy==1.12
118
+ tencentcloud-sdk-python==3.0.1215
119
  threadpoolctl==3.3.0
120
  tika==2.6.0
121
  tiktoken==0.6.0
web/src/assets/svg/llm/hunyuan.svg ADDED
web/src/locales/en.ts CHANGED
@@ -521,6 +521,10 @@ The above is the content you need to summarize.`,
521
  'eu-central-1': 'Europe (Frankfurt)',
522
  'us-gov-west-1': 'AWS GovCloud (US-West)',
523
  'ap-southeast-2': 'Asia Pacific (Sydney)',
 
 
 
 
524
  },
525
  message: {
526
  registered: 'Registered!',
 
521
  'eu-central-1': 'Europe (Frankfurt)',
522
  'us-gov-west-1': 'AWS GovCloud (US-West)',
523
  'ap-southeast-2': 'Asia Pacific (Sydney)',
524
+ addHunyuanSID: 'Hunyuan Secret ID',
525
+ HunyuanSIDMessage: 'Please input your Secret ID',
526
+ addHunyuanSK: 'Hunyuan Secret Key',
527
+ HunyuanSKMessage: 'Please input your Secret Key',
528
  },
529
  message: {
530
  registered: 'Registered!',
web/src/locales/zh-traditional.ts CHANGED
@@ -484,6 +484,10 @@ export default {
484
  'eu-central-1': '歐洲 (法蘭克福)',
485
  'us-gov-west-1': 'AWS GovCloud (US-West)',
486
  'ap-southeast-2': '亞太地區 (雪梨)',
 
 
 
 
487
  },
488
  message: {
489
  registered: '註冊成功',
 
484
  'eu-central-1': '歐洲 (法蘭克福)',
485
  'us-gov-west-1': 'AWS GovCloud (US-West)',
486
  'ap-southeast-2': '亞太地區 (雪梨)',
487
+ addHunyuanSID: '混元 Secret ID',
488
+ HunyuanSIDMessage: '請輸入 Secret ID',
489
+ addHunyuanSK: '混元 Secret Key',
490
+ HunyuanSKMessage: '請輸入 Secret Key',
491
  },
492
  message: {
493
  registered: '註冊成功',
web/src/locales/zh.ts CHANGED
@@ -501,6 +501,10 @@ export default {
501
  'eu-central-1': '欧洲 (法兰克福)',
502
  'us-gov-west-1': 'AWS GovCloud (US-West)',
503
  'ap-southeast-2': '亚太地区 (悉尼)',
 
 
 
 
504
  },
505
  message: {
506
  registered: '注册成功',
 
501
  'eu-central-1': '欧洲 (法兰克福)',
502
  'us-gov-west-1': 'AWS GovCloud (US-West)',
503
  'ap-southeast-2': '亚太地区 (悉尼)',
504
+ addHunyuanSID: '混元 Secret ID',
505
+ HunyuanSIDMessage: '请输入 Secret ID',
506
+ addHunyuanSK: '混元 Secret Key',
507
+ HunyuanSKMessage: '请输入 Secret Key',
508
  },
509
  message: {
510
  registered: '注册成功',
web/src/pages/user-setting/setting-model/constant.ts CHANGED
@@ -30,8 +30,9 @@ export const IconMap = {
30
  Upstage: 'upstage',
31
  'novita.ai': 'novita-ai',
32
  SILICONFLOW: 'siliconflow',
33
- "01.AI": 'yi',
34
- "Replicate": 'replicate'
 
35
  };
36
 
37
  export const BedrockRegionList = [
 
30
  Upstage: 'upstage',
31
  'novita.ai': 'novita-ai',
32
  SILICONFLOW: 'siliconflow',
33
+ '01.AI': 'yi',
34
+ Replicate: 'replicate',
35
+ 'Tencent Hunyuan': 'hunyuan',
36
  };
37
 
38
  export const BedrockRegionList = [
web/src/pages/user-setting/setting-model/hooks.ts CHANGED
@@ -163,6 +163,33 @@ export const useSubmitVolcEngine = () => {
163
  };
164
  };
165
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
166
  export const useSubmitBedrock = () => {
167
  const { addLlm, loading } = useAddLlm();
168
  const {
 
163
  };
164
  };
165
 
166
+ export const useSubmitHunyuan = () => {
167
+ const { addLlm, loading } = useAddLlm();
168
+ const {
169
+ visible: HunyuanAddingVisible,
170
+ hideModal: hideHunyuanAddingModal,
171
+ showModal: showHunyuanAddingModal,
172
+ } = useSetModalState();
173
+
174
+ const onHunyuanAddingOk = useCallback(
175
+ async (payload: IAddLlmRequestBody) => {
176
+ const ret = await addLlm(payload);
177
+ if (ret === 0) {
178
+ hideHunyuanAddingModal();
179
+ }
180
+ },
181
+ [hideHunyuanAddingModal, addLlm],
182
+ );
183
+
184
+ return {
185
+ HunyuanAddingLoading: loading,
186
+ onHunyuanAddingOk,
187
+ HunyuanAddingVisible,
188
+ hideHunyuanAddingModal,
189
+ showHunyuanAddingModal,
190
+ };
191
+ };
192
+
193
  export const useSubmitBedrock = () => {
194
  const { addLlm, loading } = useAddLlm();
195
  const {
web/src/pages/user-setting/setting-model/hunyuan-modal/index.tsx ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { useTranslate } from '@/hooks/common-hooks';
2
+ import { IModalProps } from '@/interfaces/common';
3
+ import { IAddLlmRequestBody } from '@/interfaces/request/llm';
4
+ import { Form, Input, Modal, Select } from 'antd';
5
+ import omit from 'lodash/omit';
6
+
7
+ type FieldType = IAddLlmRequestBody & {
8
+ vision: boolean;
9
+ hunyuan_sid: string;
10
+ hunyuan_sk: string;
11
+ };
12
+
13
+ const { Option } = Select;
14
+
15
+ const HunyuanModal = ({
16
+ visible,
17
+ hideModal,
18
+ onOk,
19
+ loading,
20
+ llmFactory,
21
+ }: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
22
+ const [form] = Form.useForm<FieldType>();
23
+
24
+ const { t } = useTranslate('setting');
25
+
26
+ const handleOk = async () => {
27
+ const values = await form.validateFields();
28
+ const modelType =
29
+ values.model_type === 'chat' && values.vision
30
+ ? 'image2text'
31
+ : values.model_type;
32
+
33
+ const data = {
34
+ ...omit(values, ['vision']),
35
+ model_type: modelType,
36
+ llm_factory: llmFactory,
37
+ };
38
+ console.info(data);
39
+
40
+ onOk?.(data);
41
+ };
42
+
43
+ return (
44
+ <Modal
45
+ title={t('addLlmTitle', { name: llmFactory })}
46
+ open={visible}
47
+ onOk={handleOk}
48
+ onCancel={hideModal}
49
+ okButtonProps={{ loading }}
50
+ confirmLoading={loading}
51
+ >
52
+ <Form
53
+ name="basic"
54
+ style={{ maxWidth: 600 }}
55
+ autoComplete="off"
56
+ layout={'vertical'}
57
+ form={form}
58
+ >
59
+ <Form.Item<FieldType>
60
+ label={t('addHunyuanSID')}
61
+ name="hunyuan_sid"
62
+ rules={[{ required: true, message: t('HunyuanSIDMessage') }]}
63
+ >
64
+ <Input placeholder={t('HunyuanSIDMessage')} />
65
+ </Form.Item>
66
+ <Form.Item<FieldType>
67
+ label={t('addHunyuanSK')}
68
+ name="hunyuan_sk"
69
+ rules={[{ required: true, message: t('HunyuanSKMessage') }]}
70
+ >
71
+ <Input placeholder={t('HunyuanSKMessage')} />
72
+ </Form.Item>
73
+ </Form>
74
+ </Modal>
75
+ );
76
+ };
77
+
78
+ export default HunyuanModal;
web/src/pages/user-setting/setting-model/index.tsx CHANGED
@@ -34,10 +34,12 @@ import {
34
  useHandleDeleteLlm,
35
  useSubmitApiKey,
36
  useSubmitBedrock,
 
37
  useSubmitOllama,
38
  useSubmitSystemModelSetting,
39
  useSubmitVolcEngine,
40
  } from './hooks';
 
41
  import styles from './index.less';
42
  import OllamaModal from './ollama-modal';
43
  import SystemModelSettingModal from './system-model-setting-modal';
@@ -88,7 +90,9 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
88
  <Col span={12} className={styles.factoryOperationWrapper}>
89
  <Space size={'middle'}>
90
  <Button onClick={handleApiKeyClick}>
91
- {isLocalLlmFactory(item.name) || item.name === 'VolcEngine'
 
 
92
  ? t('addTheModel')
93
  : 'API-Key'}
94
  <SettingOutlined />
@@ -162,6 +166,14 @@ const UserSettingModel = () => {
162
  volcAddingLoading,
163
  } = useSubmitVolcEngine();
164
 
 
 
 
 
 
 
 
 
165
  const {
166
  bedrockAddingLoading,
167
  onBedrockAddingOk,
@@ -174,8 +186,9 @@ const UserSettingModel = () => {
174
  () => ({
175
  Bedrock: showBedrockAddingModal,
176
  VolcEngine: showVolcAddingModal,
 
177
  }),
178
- [showBedrockAddingModal, showVolcAddingModal],
179
  );
180
 
181
  const handleAddModel = useCallback(
@@ -286,6 +299,13 @@ const UserSettingModel = () => {
286
  loading={volcAddingLoading}
287
  llmFactory={'VolcEngine'}
288
  ></VolcEngineModal>
 
 
 
 
 
 
 
289
  <BedrockModal
290
  visible={bedrockAddingVisible}
291
  hideModal={hideBedrockAddingModal}
 
34
  useHandleDeleteLlm,
35
  useSubmitApiKey,
36
  useSubmitBedrock,
37
+ useSubmitHunyuan,
38
  useSubmitOllama,
39
  useSubmitSystemModelSetting,
40
  useSubmitVolcEngine,
41
  } from './hooks';
42
+ import HunyuanModal from './hunyuan-modal';
43
  import styles from './index.less';
44
  import OllamaModal from './ollama-modal';
45
  import SystemModelSettingModal from './system-model-setting-modal';
 
90
  <Col span={12} className={styles.factoryOperationWrapper}>
91
  <Space size={'middle'}>
92
  <Button onClick={handleApiKeyClick}>
93
+ {isLocalLlmFactory(item.name) ||
94
+ item.name === 'VolcEngine' ||
95
+ item.name === 'Tencent Hunyuan'
96
  ? t('addTheModel')
97
  : 'API-Key'}
98
  <SettingOutlined />
 
166
  volcAddingLoading,
167
  } = useSubmitVolcEngine();
168
 
169
+ const {
170
+ HunyuanAddingVisible,
171
+ hideHunyuanAddingModal,
172
+ showHunyuanAddingModal,
173
+ onHunyuanAddingOk,
174
+ HunyuanAddingLoading,
175
+ } = useSubmitHunyuan();
176
+
177
  const {
178
  bedrockAddingLoading,
179
  onBedrockAddingOk,
 
186
  () => ({
187
  Bedrock: showBedrockAddingModal,
188
  VolcEngine: showVolcAddingModal,
189
+ 'Tencent Hunyuan': showHunyuanAddingModal,
190
  }),
191
+ [showBedrockAddingModal, showVolcAddingModal, showHunyuanAddingModal],
192
  );
193
 
194
  const handleAddModel = useCallback(
 
299
  loading={volcAddingLoading}
300
  llmFactory={'VolcEngine'}
301
  ></VolcEngineModal>
302
+ <HunyuanModal
303
+ visible={HunyuanAddingVisible}
304
+ hideModal={hideHunyuanAddingModal}
305
+ onOk={onHunyuanAddingOk}
306
+ loading={HunyuanAddingLoading}
307
+ llmFactory={'Tencent Hunyuan'}
308
+ ></HunyuanModal>
309
  <BedrockModal
310
  visible={bedrockAddingVisible}
311
  hideModal={hideBedrockAddingModal}