✨ feat: add support for Ollama and Transformers models in app.py
app.py CHANGED
@@ -1,4 +1,4 @@
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool, LiteLLMModel
+from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool, LiteLLMModel, TransformersModel
 import datetime
 import requests
 import pytz
@@ -56,6 +56,24 @@ model = LiteLLMModel(
     api_key=os.getenv("LITELLM_API_KEY")
 )
 
+# ollama
+# model = LiteLLMModel(
+#     model_id="ollama_chat/deepseek-r1:7b",
+#     max_tokens=2096,
+#     temperature=0.6,
+#     api_base="http://localhost:11434",
+#     num_ctx=8192
+# )
+
+# transformer
+# model = TransformersModel(
+#     model_id="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
+#     device_map="auto",
+#     torch_dtype="auto",
+#     max_new_tokens=2096,
+#     temperature=0.6,
+# )
+
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
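Both new backends are left commented out in this commit, so app.py keeps using the existing LiteLLM configuration. As a minimal sketch of how either block could be enabled once uncommented (assuming a smolagents CodeAgent setup like the rest of app.py; the tool list, agent prompt, and local Ollama endpoint below are illustrative, not part of the commit):

from smolagents import CodeAgent, DuckDuckGoSearchTool, LiteLLMModel, TransformersModel

# Option 1: route through a local Ollama server via LiteLLM.
# Assumes Ollama is listening on its default port 11434 and the
# deepseek-r1:7b model has already been pulled.
model = LiteLLMModel(
    model_id="ollama_chat/deepseek-r1:7b",
    api_base="http://localhost:11434",
    max_tokens=2096,
    temperature=0.6,
    num_ctx=8192,
)

# Option 2: run the distilled model in-process with transformers
# (downloads the weights from the Hugging Face Hub on first use).
# model = TransformersModel(
#     model_id="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
#     device_map="auto",
#     torch_dtype="auto",
#     max_new_tokens=2096,
#     temperature=0.6,
# )

# Whichever model is active gets passed to the agent the same way the
# existing LiteLLM model is; the single search tool here is a placeholder.
agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=model)
agent.run("What time is it in Tokyo right now?")

Only one `model` assignment should be active at a time; the commit keeps both alternatives commented so the hosted Space continues to use the LiteLLM API key configuration above.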