// main.ts
// Model name to ID mapping
const MODEL_MAPPING: Record<string, string> = {
'grok-3-preview-02-24': 'bd2c8278-af7a-4ec3-84db-0a426c785564',
'gemini-2.0-flash-001': '7a55108b-b997-4cff-a72f-5aa83beee918',
'chatgpt-4o-latest-20250326': '9513524d-882e-4350-b31e-e4584440c2c8',
'claude-3-5-sonnet-20241022': 'f44e280a-7914-43ca-a25d-ecfcc5d48d09',
'gemini-2.5-flash-preview-05-20': 'ce2092c1-28d4-4d42-a1e0-6b061dfe0b20',
'llama-4-maverick-03-26-experimental': '49bd7403-c7fd-4d91-9829-90a91906ad6c',
'gpt-4.1-2025-04-14': '14e9311c-94d2-40c2-8c54-273947e208b0',
'qwq-32b': '885976d3-d178-48f5-a3f4-6e13e0718872',
'gemini-2.0-flash-preview-image-generation': '69bbf7d4-9f44-447e-a868-abc4f7a31810',
'gpt-4.1-mini-2025-04-14': '6a5437a7-c786-467b-b701-17b0bc8c8231',
'grok-3-mini-beta': '7699c8d4-0742-42f9-a117-d10e84688dab',
'claude-3-7-sonnet-20250219': 'c5a11495-081a-4dc6-8d9a-64a4fd6f7bbc',
'amazon.nova-pro-v1:0': 'a14546b5-d78d-4cf6-bb61-ab5b8510a9d6',
'claude-3-7-sonnet-20250219-thinking-32k': 'be98fcfd-345c-4ae1-9a82-a19123ebf1d2',
'claude-3-5-haiku-20241022': 'f6fbf06c-532c-4c8a-89c7-f3ddcfb34bd1',
'gemma-3-27b-it': '789e245f-eafe-4c72-b563-d135e93988fc',
'o3-2025-04-16': 'cb0f1e24-e8e9-4745-aabc-b926ffde7475',
'o3-mini': 'c680645e-efac-4a81-b0af-da16902b2541',
'o4-mini-2025-04-16': 'f1102bbf-34ca-468f-a9fc-14bcf63f315b',
'command-a-03-2025': '0f785ba1-efcb-472d-961e-69f7b251c7e3',
'gemini-2.5-flash-preview-04-17': '59f510a7-e853-45cb-b8a8-b6b4b24c337e',
'gemini-2.5-flash-preview-04-17-alt': '7fff29a7-93cc-44ab-b685-482c55ce4fa6',
'claude-opus-4-20250514': 'ee116d12-64d6-48a8-88e5-b2d06325cdd2',
'gpt-image-1': '6e855f13-55d7-4127-8656-9168a9f4dcc0',
'photon': '17e31227-36d7-4a7a-943a-7ebffa3a00eb',
'imagen-3.0-generate-002': '51ad1d79-61e2-414c-99e3-faeb64bb6b1b',
'qwen3-235b-a22b': '2595a594-fa54-4299-97cd-2d7380d21c80',
'qwen-max-2025-01-25': 'fe8003fc-2e5d-4a3f-8f07-c1cff7ba0159',
'qwen3-30b-a3b': '9a066f6a-7205-4325-8d0b-d81cc4b049c0',
'claude-sonnet-4-20250514': 'ac44dd10-0666-451c-b824-386ccfea7bcc',
'deepseek-v3-0324': '2f5253e4-75be-473c-bcfc-baeb3df0f8ad',
'llama-3.3-70b-instruct': 'dcbd7897-5a37-4a34-93f1-76a24c7bb028',
'llama-4-maverick-17b-128e-instruct': 'b5ad3ab7-fc56-4ecd-8921-bd56b55c1159',
'mistral-medium-2505': '27b9f8c6-3ee1-464a-9479-a8b3c2a48fd4',
'ideogram-v2': '34ee5a83-8d85-4d8b-b2c1-3b3413e9ed98',
'cobalt-exp-beta-v11': '607ff7db-ad0e-47ff-a493-107119eed7e0',
'gemini-2.5-pro-preview-05-06': '0337ee08-8305-40c0-b820-123ad42b60cf',
'goldmane': 'e2d9d353-6dbe-4414-bf87-bd289d523726',
'dall-e-3': 'bb97bc68-131c-4ea4-a59e-03a6252de0d2',
'recraft-v3': 'b70ab012-18e7-4d6f-a887-574e05de6c20',
'redsword': '3f4930f2-8898-429b-8c46-1969edfe2e19',
'anonymous-bot-0514': 'eb5da04f-9b28-406b-bf06-4539158c66ef',
'flux-1.1-pro': '9e8525b7-fe50-4e50-bf7f-ad1d3d205d3c'
};
// Default model
const DEFAULT_MODEL = 'claude-opus-4-20250514';
// Read configuration from environment variables
const PORT = parseInt(Deno.env.get("PORT") || "7860");
const HOST = Deno.env.get("HOST") || "0.0.0.0";
const DEFAULT_AUTH_TOKEN = Deno.env.get("LMARENA_AUTH_TOKEN") || "";
const DEFAULT_COOKIE = Deno.env.get("LMARENA_COOKIE") || "";
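// Example launch command (illustrative only; the token value is a placeholder):
//   LMARENA_AUTH_TOKEN=<arena-auth-token> deno run --allow-net --allow-env main.ts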
// Streaming chat function
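// Upstream protocol note: the create-evaluation endpoint replies with a
// newline-delimited stream in which each line carries a short prefix handled
// below: "af:" (message ID metadata), "a0:" (a JSON-encoded content fragment),
// and "ae:" (end-of-generation metadata). Lines with other prefixes are ignored.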
async function chatStream(at: string, messages: any[], modelName: string = DEFAULT_MODEL): Promise<ReadableStream<Uint8Array> | null> {
const url = "https://beta.lmarena.ai/api/stream/create-evaluation";
const chatId = crypto.randomUUID();
const modelId = MODEL_MAPPING[modelName];
if (!modelId) {
console.error('Unsupported model:', modelName);
return null;
}
const processedMessages = buildMessages(messages, modelId, chatId);
const payload = buildPayload(chatId, modelId, processedMessages);
const authToken = at || DEFAULT_AUTH_TOKEN;
const cookie = DEFAULT_COOKIE || `arena-auth-prod-v1=${authToken}`;
const headers = buildHeaders(cookie);
try {
const response = await fetch(url, {
method: 'POST',
headers: headers,
body: JSON.stringify(payload),
});
if (!response.ok) {
const data = await response.text();
console.log(data);
console.error('API request failed:', response.status, response.statusText);
return null;
}
return new ReadableStream({
start(controller) {
const reader = response.body?.getReader();
const decoder = new TextDecoder();
let buffer = '';
const completionId = `chatcmpl-${chatId}`;
const created = Math.floor(Date.now() / 1000);
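// Each upstream content fragment is re-emitted as an OpenAI-style
// chat.completion.chunk wrapped in SSE framing ("data: {...}\n\n"),
// and the stream is terminated with "data: [DONE]".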
async function pump(): Promise<void> {
while (true) {
try {
const { done, value } = await reader!.read();
if (done) {
console.log('Stream read complete');
const doneData = `data: [DONE]\n\n`;
controller.enqueue(new TextEncoder().encode(doneData));
controller.close();
return;
}
const text = decoder.decode(value, { stream: true });
buffer += text;
const lines = buffer.split('\n');
buffer = lines.pop() || '';
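// Keep any trailing partial line in the buffer until the next chunk arrives.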
for (const line of lines) {
if (line.trim()) {
try {
if (line.startsWith('af:')) {
console.log('Received message ID line:', line);
} else if (line.startsWith('a0:')) {
const contentJson = line.substring(3);
const content = JSON.parse(contentJson);
const chunk = {
id: completionId,
object: "chat.completion.chunk",
created: created,
model: modelName,
choices: [
{
index: 0,
delta: {
content: content
},
finish_reason: null
}
]
};
const sseData = `data: ${JSON.stringify(chunk)}\n\n`;
controller.enqueue(new TextEncoder().encode(sseData));
} else if (line.startsWith('ae:')) {
console.log('Received end-of-stream line:', line);
const finalChunk = {
id: completionId,
object: "chat.completion.chunk",
created: created,
model: modelName,
choices: [
{
index: 0,
delta: {},
finish_reason: "stop"
}
]
};
const finalData = `data: ${JSON.stringify(finalChunk)}\n\n`;
controller.enqueue(new TextEncoder().encode(finalData));
}
} catch (parseError) {
console.warn('Failed to parse line:', line, parseError);
}
}
}
} catch (error) {
console.error('Stream processing error:', error);
const errorChunk = {
id: completionId,
object: "error",
error: {
message: error instanceof Error ? error.message : String(error),
type: "stream_error"
}
};
const errorData = `data: ${JSON.stringify(errorChunk)}\n\n`;
controller.enqueue(new TextEncoder().encode(errorData));
controller.error(error);
return;
}
}
}
pump();
},
cancel() {
console.log('Stream cancelled');
}
});
} catch (error) {
console.error('Request failed:', error);
return null;
}
}
// Non-streaming chat function
async function chatNonStream(at: string, messages: any[], modelName: string = DEFAULT_MODEL): Promise<any | null> {
const url = "https://beta.lmarena.ai/api/stream/create-evaluation";
const chatId = crypto.randomUUID();
const modelId = MODEL_MAPPING[modelName];
if (!modelId) {
console.error('Unsupported model:', modelName);
return null;
}
const processedMessages = buildMessages(messages, modelId, chatId);
const payload = buildPayload(chatId, modelId, processedMessages);
const authToken = at || DEFAULT_AUTH_TOKEN;
const cookie = DEFAULT_COOKIE || `arena-auth-prod-v1=${authToken}`;
const headers = buildHeaders(cookie);
try {
const response = await fetch(url, {
method: 'POST',
headers: headers,
body: JSON.stringify(payload),
});
if (!response.ok) {
const data = await response.text();
console.log(data);
console.error('API request failed:', response.status, response.statusText);
return null;
}
// Collect all streamed data
const reader = response.body?.getReader();
const decoder = new TextDecoder();
let buffer = '';
let fullContent = '';
while (true) {
const { done, value } = await reader!.read();
if (done) {
break;
}
const text = decoder.decode(value, { stream: true });
buffer += text;
const lines = buffer.split('\n');
buffer = lines.pop() || '';
for (const line of lines) {
if (line.trim()) {
try {
if (line.startsWith('a0:')) {
const contentJson = line.substring(3);
const content = JSON.parse(contentJson);
fullContent += content;
}
} catch (parseError) {
console.warn('Failed to parse line:', line, parseError);
}
}
}
}
// Return the complete response in OpenAI format
return {
id: `chatcmpl-${chatId}`,
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model: modelName,
choices: [
{
index: 0,
message: {
role: "assistant",
content: fullContent
},
finish_reason: "stop"
}
],
usage: {
prompt_tokens: 0, // LMArena does not provide token counts
completion_tokens: 0,
total_tokens: 0
}
};
} catch (error) {
console.error('Request failed:', error);
return null;
}
}
// Helper to build the message list
function buildMessages(messages: any[], modelId: string, chatId: string): any[] {
const processedMessages: any[] = [];
let lastMessageId = '';
for (let i = 0; i < messages.length; i++) {
const msg = messages[i];
const messageId = crypto.randomUUID();
const parentMessageIds = lastMessageId ? [lastMessageId] : [];
let status = 'pending';
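// Assistant messages that already have a following turn are marked 'success';
// user messages and the final message stay 'pending'.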
if (msg.role === 'assistant' && i < messages.length - 1) {
status = 'success';
}
const processedMessage = {
"id": messageId,
"role": msg.role,
"content": msg.content || '',
"experimental_attachments": [],
"parentMessageIds": parentMessageIds,
"participantPosition": "a",
"modelId": modelId,
"evaluationSessionId": chatId,
"status": status,
"failureReason": null
};
processedMessages.push(processedMessage);
lastMessageId = messageId;
}
// Append a new assistant message as the pending reply to be generated
const newAssistantId = crypto.randomUUID();
const newAssistantMessage = {
"id": newAssistantId,
"role": "assistant",
"content": "",
"experimental_attachments": [],
"parentMessageIds": lastMessageId ? [lastMessageId] : [],
"participantPosition": "a",
"modelId": modelId,
"evaluationSessionId": chatId,
"status": "pending",
"failureReason": null
};
processedMessages.push(newAssistantMessage);
return processedMessages;
}
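// The resulting list forms a linear chain: each message points to the previous
// one via parentMessageIds, and the final entry is an empty 'pending' assistant
// message whose ID becomes modelAMessageId in the payload built below.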
// Helper to build the request payload
function buildPayload(chatId: string, modelId: string, processedMessages: any[]): any {
return {
"id": chatId,
"mode": "direct",
"modelAId": modelId,
"userMessageId": processedMessages[processedMessages.length - 2]?.id || crypto.randomUUID(),
"modelAMessageId": processedMessages[processedMessages.length - 1]?.id,
"messages": processedMessages,
"modality": "chat"
};
}
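// Illustrative payload shape (UUIDs are placeholders):
// {
//   "id": "<evaluation-session-uuid>",
//   "mode": "direct",
//   "modelAId": "<model-uuid>",
//   "userMessageId": "<last-original-message-uuid>",
//   "modelAMessageId": "<pending-assistant-message-uuid>",
//   "messages": [ ... ],
//   "modality": "chat"
// }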
// Helper to build the request headers
function buildHeaders(cookie: string): Record<string, string> {
return {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
'Content-Type': 'application/json',
'Accept': '*/*',
'Accept-Language': 'en-US,en;q=0.9',
'Accept-Encoding': 'gzip, deflate, br',
'Connection': 'keep-alive',
'Sec-Fetch-Dest': 'empty',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Site': 'same-origin',
'Sec-Ch-Ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
'Sec-Ch-Ua-Mobile': '?0',
'Sec-Ch-Ua-Platform': '"Windows"',
'Cookie': cookie
};
}
// Health check page
function getHealthPage(): string {
return `
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<title>LMArena API Proxy</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
line-height: 1.6;
color: #333;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
padding: 20px;
}
.container {
max-width: 900px;
margin: 0 auto;
background: white;
border-radius: 15px;
box-shadow: 0 20px 40px rgba(0,0,0,0.1);
overflow: hidden;
}
.header {
background: linear-gradient(135deg, #4facfe 0%, #00f2fe 100%);
color: white;
padding: 40px;
text-align: center;
}
.header h1 {
font-size: 2.5em;
margin-bottom: 10px;
font-weight: 300;
}
.status {
font-size: 1.2em;
opacity: 0.9;
}
.content {
padding: 40px;
}
.features {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
gap: 20px;
margin-bottom: 40px;
}
.feature-card {
background: #f8f9fa;
padding: 25px;
border-radius: 10px;
text-align: center;
border-left: 4px solid #4facfe;
}
.feature-icon {
font-size: 2em;
margin-bottom: 15px;
}
.feature-title {
font-size: 1.1em;
font-weight: 600;
color: #2c3e50;
margin-bottom: 8px;
}
.feature-desc {
color: #6c757d;
font-size: 0.9em;
}
.section {
margin-bottom: 40px;
}
.section h2 {
color: #2c3e50;
margin-bottom: 20px;
font-size: 1.8em;
font-weight: 300;
border-bottom: 2px solid #e9ecef;
padding-bottom: 10px;
}
.endpoint-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 20px;
}
.endpoint {
background: #f8f9fa;
padding: 25px;
border-radius: 10px;
border-left: 4px solid #28a745;
}
.endpoint-method {
background: #28a745;
color: white;
padding: 4px 8px;
border-radius: 4px;
font-size: 0.8em;
font-weight: bold;
margin-right: 10px;
}
.endpoint-path {
font-family: 'Monaco', 'Menlo', monospace;
font-weight: bold;
color: #2c3e50;
}
.endpoint-desc {
margin-top: 10px;
color: #6c757d;
font-size: 0.9em;
}
.model-stats {
display: flex;
justify-content: space-around;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
color: white;
padding: 30px;
border-radius: 10px;
margin-bottom: 20px;
}
.stat {
text-align: center;
}
.stat-number {
font-size: 2.5em;
font-weight: bold;
display: block;
}
.stat-label {
font-size: 0.9em;
opacity: 0.9;
}
.model-grid {
display: grid;
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
gap: 10px;
max-height: 400px;
overflow-y: auto;
background: #f8f9fa;
padding: 20px;
border-radius: 10px;
border: 1px solid #e9ecef;
}
.model-item {
background: white;
padding: 12px;
border-radius: 6px;
font-family: 'Monaco', 'Menlo', monospace;
font-size: 0.85em;
border: 1px solid #e9ecef;
transition: all 0.2s ease;
}
.model-item:hover {
border-color: #4facfe;
box-shadow: 0 2px 8px rgba(79, 172, 254, 0.2);
}
.footer {
background: #2c3e50;
color: white;
padding: 30px;
text-align: center;
}
.footer-links {
margin-top: 15px;
}
.footer-links a {
color: #4facfe;
text-decoration: none;
margin: 0 15px;
}
.footer-links a:hover {
text-decoration: underline;
}
@media (max-width: 768px) {
.header h1 {
font-size: 2em;
}
.content {
padding: 20px;
}
.features {
grid-template-columns: 1fr;
}
.model-stats {
flex-direction: column;
gap: 20px;
}
}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>🤖 LMArena API Proxy</h1>
<div class="status">✅ 服务运行正常</div>
</div>
<div class="content">
<div class="features">
<div class="feature-card">
<div class="feature-icon">🌊</div>
<div class="feature-title">流式 & 非流式</div>
<div class="feature-desc">支持实时流式输出和完整响应两种模式</div>
</div>
<div class="feature-card">
<div class="feature-icon">🔄</div>
<div class="feature-title">OpenAI 兼容</div>
<div class="feature-desc">完全兼容 OpenAI API 格式,无缝迁移</div>
</div>
<div class="feature-card">
<div class="feature-icon">🎯</div>
<div class="feature-title">多模型支持</div>
<div class="feature-desc">支持 Claude、GPT、Gemini 等 50+ 模型</div>
</div>
<div class="feature-card">
<div class="feature-icon">🚀</div>
<div class="feature-title">高性能</div>
<div class="feature-desc">基于 Deno 运行时,快速稳定</div>
</div>
</div>
<div class="section">
<h2>📡 API Endpoints</h2>
<div class="endpoint-grid">
<div class="endpoint">
<div>
<span class="endpoint-method">POST</span>
<span class="endpoint-path">/v1/chat/completions</span>
</div>
<div class="endpoint-desc">
Chat completions endpoint with streaming and non-streaming output<br>
Parameter: stream=true/false
</div>
</div>
<div class="endpoint">
<div>
<span class="endpoint-method">GET</span>
<span class="endpoint-path">/v1/models</span>
</div>
<div class="endpoint-desc">
List the supported models<br>
Returns information for all available models
</div>
</div>
</div>
</div>
<div class="section">
<h2>🎯 Model Stats</h2>
<div class="model-stats">
<div class="stat">
<span class="stat-number">${Object.keys(MODEL_MAPPING).length}</span>
<span class="stat-label">支持模型</span>
</div>
<div class="stat">
<span class="stat-number">5</span>
<span class="stat-label">主要厂商</span>
</div>
<div class="stat">
<span class="stat-number">24/7</span>
<span class="stat-label">服务可用</span>
</div>
</div>
<div class="model-grid">
${Object.keys(MODEL_MAPPING).map(model => `<div class="model-item">${model}</div>`).join('')}
</div>
</div>
</div>
<div class="footer">
<div>© 2025 LMArena API Proxy - A high-performance AI model proxy service</div>
<div class="footer-links">
<a href="/v1/models">模型列表</a>
<a href="https://github.com">GitHub</a>
<a href="https://docs.openai.com/api-reference">API 文档</a>
</div>
</div>
</div>
</body>
</html>
`;
}
// Deno HTTP server
Deno.serve({ port: PORT, hostname: HOST }, async (req: Request) => {
const url = new URL(req.url);
// Handle CORS
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
};
if (req.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });
}
if (req.method === 'POST' && url.pathname === '/v1/chat/completions') {
try {
const body = await req.json();
// Accept OpenAI-compatible request format
let at: string | undefined;
let messages: any[] | undefined;
let modelName: string = DEFAULT_MODEL;
let isStream: boolean = body.stream !== false; // default to streaming output
// Check for an Authorization header (Bearer token)
const authHeader = req.headers.get('Authorization');
if (authHeader && authHeader.startsWith('Bearer ')) {
at = authHeader.substring(7);
}
// If there is no Authorization header, try the 'at' parameter from the body
if (!at && body.at) {
at = body.at;
}
// If there is still no auth token, fall back to the environment default
if (!at) {
at = DEFAULT_AUTH_TOKEN;
}
// Resolve the model name
if (body.model && typeof body.model === 'string') {
modelName = body.model;
// Validate that the model is supported
if (!MODEL_MAPPING[modelName]) {
return new Response(
JSON.stringify({
error: {
message: `Unsupported model: ${modelName}`,
type: 'invalid_request_error',
code: 'model_not_found'
}
}),
{
status: 400,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
}
// Extract the full conversation history from OpenAI-format messages
if (body.messages && Array.isArray(body.messages)) {
messages = body.messages;
}
// Fall back to the legacy 'query' parameter when no messages array is provided
if (!messages && body.query) {
messages = [{ role: 'user', content: body.query }];
}
if (!messages) {
return new Response(
JSON.stringify({
error: {
message: 'Missing required parameter: a messages array or query parameter is required',
type: 'invalid_request_error'
}
}),
{
status: 400,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
// Choose handling based on the stream parameter
if (isStream) {
// Streaming output
const stream = await chatStream(at, messages, modelName);
if (!stream) {
return new Response(
JSON.stringify({
error: {
message: 'Failed to obtain a streaming response',
type: 'internal_server_error'
}
}),
{
status: 500,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
// Return the streaming response
return new Response(stream, {
headers: {
...corsHeaders,
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
'X-Accel-Buffering': 'no',
'Transfer-Encoding': 'chunked'
}
});
} else {
// Non-streaming output
const result = await chatNonStream(at, messages, modelName);
if (!result) {
return new Response(
JSON.stringify({
error: {
message: 'Failed to obtain a response',
type: 'internal_server_error'
}
}),
{
status: 500,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
// Return the non-streaming response
return new Response(
JSON.stringify(result),
{
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
} catch (error) {
return new Response(
JSON.stringify({
error: {
message: 'Request handling failed',
type: 'internal_server_error',
details: error instanceof Error ? error.message : String(error)
}
}),
{
status: 500,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
}
// Model list API (OpenAI-compatible)
if (req.method === 'GET' && url.pathname === '/v1/models') {
const models = Object.keys(MODEL_MAPPING).map(modelName => ({
id: modelName,
object: "model",
created: Math.floor(Date.now() / 1000),
owned_by: "lmarena",
permission: [
{
id: `modelperm-${crypto.randomUUID()}`,
object: "model_permission",
created: Math.floor(Date.now() / 1000),
allow_create_engine: false,
allow_sampling: true,
allow_logprobs: false,
allow_search_indices: false,
allow_view: true,
allow_fine_tuning: false,
organization: "*",
group: null,
is_blocking: false
}
],
root: modelName,
parent: null
}));
return new Response(
JSON.stringify({
object: "list",
data: models
}),
{
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
}
// Root path serves the health check page
if (url.pathname === '/') {
return new Response(
getHealthPage(),
{
headers: { ...corsHeaders, 'Content-Type': 'text/html; charset=utf-8' }
}
);
}
return new Response(
JSON.stringify({
error: {
message: '404 Not Found',
type: 'not_found_error'
}
}),
{
status: 404,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
}
);
});
console.log(`🚀 LMArena API Proxy started`);
console.log(`📡 Listening on: http://${HOST}:${PORT}`);
console.log(`🔧 Supported models: ${Object.keys(MODEL_MAPPING).length}`);
console.log(`🌐 Health check: http://${HOST}:${PORT}/`);
console.log(`✨ Streaming and non-streaming output supported`);
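// Example requests (illustrative only; replace the host, token, and model as needed):
//
//   curl http://localhost:7860/v1/models
//
//   curl http://localhost:7860/v1/chat/completions \
//     -H "Content-Type: application/json" \
//     -H "Authorization: Bearer <arena-auth-token>" \
//     -d '{"model": "claude-opus-4-20250514", "messages": [{"role": "user", "content": "Hello"}], "stream": false}'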