add support for StepFun (#1611)
### What problem does this PR solve?
#1561
### Type of change
- [x] New Feature (non-breaking change which adds functionality)
---------
Co-authored-by: Zhedong Cen <[email protected]>
conf/llm_factories.json
CHANGED

```diff
@@ -1886,6 +1886,38 @@
                     "model_type": "chat"
                 }
             ]
+        },
+        {
+            "name": "StepFun",
+            "logo": "",
+            "tags": "LLM",
+            "status": "1",
+            "llm": [
+                {
+                    "llm_name": "step-1-8k",
+                    "tags": "LLM,CHAT,15k",
+                    "max_tokens": 8192,
+                    "model_type": "chat"
+                },
+                {
+                    "llm_name": "step-1-32k",
+                    "tags": "LLM,CHAT,32k",
+                    "max_tokens": 32768,
+                    "model_type": "chat"
+                },
+                {
+                    "llm_name": "step-1-128k",
+                    "tags": "LLM,CHAT,128k",
+                    "max_tokens": 131072,
+                    "model_type": "chat"
+                },
+                {
+                    "llm_name": "step-1-256k",
+                    "tags": "LLM,CHAT,256k",
+                    "max_tokens": 262144,
+                    "model_type": "chat"
+                }
+            ]
         }
     ]
 }
```
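For context, the new factory entry can be inspected straight from the config file. The snippet below is an illustrative check, not part of the PR; the hunk only shows the closing braces of the file, so the name of the top-level key that holds the factory list is not assumed and the code scans any top-level list for an entry named "StepFun".

```python
# Illustrative only (not part of the PR): load conf/llm_factories.json and list
# the chat models registered under the new "StepFun" factory entry.
import json

with open("conf/llm_factories.json", encoding="utf-8") as f:
    conf = json.load(f)

# The top-level key is not visible in the hunk, so look for any top-level list
# of factory entries instead of hard-coding a key name.
factories = next(v for v in conf.values() if isinstance(v, list))
stepfun = next(entry for entry in factories if entry.get("name") == "StepFun")

for llm in stepfun["llm"]:
    print(llm["llm_name"], llm["max_tokens"], llm["model_type"])
# Expected output ranges from step-1-8k 8192 chat up to step-1-256k 262144 chat
```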
rag/llm/__init__.py
CHANGED

```diff
@@ -71,6 +71,7 @@ ChatModel = {
     "Bedrock": BedrockChat,
     "Groq": GroqChat,
     'OpenRouter':OpenRouterChat,
+    "StepFun":StepFunChat
 }


```
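With this registry change, a factory name of "StepFun" now resolves to the new chat class. The lookup below is a minimal sketch of how such a name-to-class registry is typically consumed; it is not the exact RAGFlow call site, and the API key and base_url values are placeholders.

```python
# Minimal sketch, assuming only what the diff shows: ChatModel maps the factory
# name "StepFun" to StepFunChat, whose constructor takes (key, model_name, base_url).
from rag.llm import ChatModel

chat_cls = ChatModel["StepFun"]   # resolves to StepFunChat
model = chat_cls(
    "YOUR_STEPFUN_API_KEY",       # placeholder credential
    "step-1-8k",                  # one of the models added in conf/llm_factories.json
    base_url="",                  # empty value falls back to the StepFun default endpoint
)
```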
rag/llm/chat_model.py
CHANGED

```diff
@@ -897,3 +897,9 @@ class OpenRouterChat(Base):
         self.base_url = "https://openrouter.ai/api/v1"
         self.client = OpenAI(base_url=self.base_url, api_key=key)
         self.model_name = model_name
+
+class StepFunChat(Base):
+    def __init__(self, key, model_name, base_url="https://api.stepfun.com/v1/chat/completions"):
+        if not base_url:
+            base_url = "https://api.stepfun.com/v1/chat/completions"
+        super().__init__(key, model_name, base_url)
```
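The subclass itself only supplies a default endpoint; all chat behaviour is inherited from Base, which this hunk does not show. The sketch below assumes Base exposes a chat(system, history, gen_conf) method like the module's other OpenAI-compatible wrappers; treat that call signature and its return value as assumptions, and the key as a placeholder.

```python
# Hypothetical usage sketch of the new class; chat() and its return shape are
# assumptions based on the surrounding Base subclasses, not shown in this diff.
from rag.llm.chat_model import StepFunChat

model = StepFunChat("YOUR_STEPFUN_API_KEY", "step-1-32k", base_url="")
# The empty base_url triggers the fallback to https://api.stepfun.com/v1/chat/completions

answer, used_tokens = model.chat(
    system="You are a helpful assistant.",
    history=[{"role": "user", "content": "Hello from RAGFlow!"}],
    gen_conf={"temperature": 0.7},
)
print(answer, used_tokens)
```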
web/src/assets/svg/llm/local-ai.svg
ADDED

web/src/assets/svg/llm/stepfun.svg
ADDED
web/src/pages/user-setting/setting-model/index.tsx
CHANGED

```diff
@@ -65,6 +65,8 @@ const IconMap = {
   Gemini: 'gemini',
   Groq: 'groq-next',
   OpenRouter: 'open-router',
+  LocalAI:'local-ai',
+  StepFun:'stepfun'
 };

 const LlmIcon = ({ name }: { name: string }) => {
```