"""Gradio app: junior-high math problem solver backed by DeepSeek-Math."""
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# DeepSeek math-reasoning instruction model
MODEL_NAME = "deepseek-ai/deepseek-math-7b-instruct"

# Load tokenizer and model once at startup.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    torch_dtype=torch.float16,  # half precision: faster inference, less VRAM
    device_map="auto",          # let accelerate place weights on GPU / CPU
)
model.eval()  # inference only — disable dropout etc.


def solve_math_problem(problem: str) -> str:
    """Generate a step-by-step solution for *problem* with the model.

    Args:
        problem: Math problem text entered by the user.

    Returns:
        The model-generated solution text (prompt stripped), or a hint
        string when the input is empty.
    """
    # Guard against empty / whitespace-only input instead of running the model.
    if not problem or not problem.strip():
        return "请输入数学题目。"

    prompt = f"请解答以下数学题,并给出详细步骤:\n{problem}\n答案:"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=256)  # cap output length

    # Decode ONLY the newly generated tokens. The previous approach decoded
    # the whole sequence and split on "答案:", which truncates the answer
    # whenever the problem text or the solution itself contains that marker.
    prompt_len = inputs["input_ids"].shape[1]
    answer = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    return answer.strip()


# Gradio UI wiring: single text-in / text-out interface with example problems.
iface = gr.Interface(
    fn=solve_math_problem,
    inputs="text",
    outputs="text",
    title="初中数学解题 AI",
    description="输入数学题目,AI 将给出详细的解答步骤。",
    examples=[
        ["解一元二次方程 x² - 5x + 6 = 0"],
        ["计算 3/4 + 5/6 的值"],
        ["求直角三角形,已知两边分别为 3 和 4,求斜边长度"],
    ],
)

iface.launch()