File size: 2,439 Bytes
9b5b26a c19d193 6aae614 0313d35 e18b0a9 8fe992b 9b5b26a 6aae614 ae7a494 e121372 bf6d34c 29ec968 fe328e0 13d500a 8c01ffb dfcee3d 337d2cc dfcee3d 337d2cc dfcee3d 0639dcb 3ac67d7 dfcee3d 8357a8c dfcee3d 8c01ffb 861422e 5335d9e 8c01ffb 8fe992b dfcee3d 8c01ffb 5335d9e 861422e 8fe992b 9b5b26a 8c01ffb |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 |
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
import pandas as pd
from huggingface_hub import InferenceClient
from Gradio_UI import GradioUI
# Tool that delivers the agent's final response to the user.
final_answer = FinalAnswerTool()

# If the agent does not answer, the model is overloaded; switch to another model or
# to the following Hugging Face Endpoint, which also serves qwen2.5 coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # this hosted model may be overloaded at times
    max_tokens=2096,
    temperature=0.5,
    custom_role_conversions=None,
)
@tool
def f1_tackinfo_getter(country: str) -> str:  # the return type annotation is required by smolagents
    """
    Returns a concise LLM-generated analysis of a 2024 F1 race.

    Args:
        country: A string representing a valid country name from the 2024 F1 calendar.

    Returns:
        A string with information about the given race, or an informative error
        message listing the available tracks when the country is not found.
    """
    df = pd.read_csv('./Formula1_2024season_raceResults.csv')
    # Select only the few columns relevant to a results summary.
    df = df[['Track', 'Position', 'Driver', 'Team']]
    grouped = df.groupby('Track')
    # Guard against unknown country names: get_group() would raise KeyError,
    # which would crash the agent step instead of giving it useful feedback.
    if country not in grouped.groups:
        available = ', '.join(sorted(str(track) for track in grouped.groups))
        return f"No race data found for '{country}'. Available tracks: {available}"
    # The first 10 rows (top finishers) are enough context for a short summary.
    info = str(grouped.get_group(country).iloc[:10])
    client = InferenceClient("meta-llama/Llama-3.2-3B-Instruct")
    system_prompt = "You are an expert in F1 race analysis. You will be given data about a race and your goal is to provide a very concise analysis of these results. Your answer should be no more that 5 sentences long. Your answer should only be based on the provided information, do not add anything that is not in the data"
    output = client.chat.completions.create(
        messages = [
            {'role': 'system', 'content': system_prompt},
            {'role': 'user', 'content': f'Here is the data about the race: {info}'}
        ]
    )
    return output.choices[0].message.content
# Load the prompt templates that drive the agent's reasoning loop.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    name='PitBot',
    description="A Formula 1 race analysis assistant that provides detailed insights about the 2024 F1 season races.",
    model=model,
    tools=[f1_tackinfo_getter, final_answer],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    prompt_templates=prompt_templates,
)

# Serve the agent behind a Gradio web UI.
GradioUI(agent).launch()