# Trendyol LLM v4.0 Collection
Qwen2.5-7B based fine-tuned models (1 item)
Trendyol LLM v4.1.0 is a generative model based on Trendyol LLM base v4.0 (a continued-pretraining version of Qwen2.5 7B trained on 13 billion tokens). This is the repository for the chat model.
Keynotes:
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import torch
# Hugging Face Hub ID of the chat variant of the model.
model_id = "Trendyol/Trendyol-LLM-7B-chat-v4.1.0"

# Build a text-generation pipeline.
# NOTE: `"use_flash_attention_2": True` is deprecated in recent transformers
# releases; `attn_implementation="flash_attention_2"` is the supported way to
# request the FlashAttention-2 backend.
pipe = pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={
        "torch_dtype": torch.bfloat16,  # halves memory vs. fp32 on supported GPUs
        "use_cache": True,              # reuse the KV cache across decoding steps
        "attn_implementation": "flash_attention_2",
    },
    device_map="auto",  # place/shard the model across available devices
)
# Decoding configuration: nucleus sampling with a mild repetition penalty.
sampling_params = {
    "do_sample": True,
    "temperature": 0.3,
    "top_k": 50,
    "top_p": 0.9,
    "repetition_penalty": 1.1,
}

DEFAULT_SYSTEM_PROMPT = "Sen yardımsever bir asistansın ve sana verilen talimatlar doğrultusunda en iyi cevabı üretmeye çalışacaksın."

# Chat-style input: one system prompt followed by a single user turn.
messages = [
    {"role": "system", "content": DEFAULT_SYSTEM_PROMPT},
    {"role": "user", "content": "Türkiye'de kaç il var?"},
]

# Generate and print only the newly produced text, not the echoed prompt.
outputs = pipe(messages, max_new_tokens=1024, return_full_text=False, **sampling_params)
print(outputs[0]["generated_text"])
# Tool (function) specifications in JSON-Schema style. These are passed to the
# chat template so the model can emit structured function calls.
tools = [
# Simple tool: one required string parameter.
{
"name": "get_city_count",
"description": "Get current city count of given country.",
"parameters": {
"type": "object",
"properties": {
"country_name": {
"type": "string",
"description": 'The name of the country to get the count for.',
},
},
"required": ["country_name"],
},
},
# Richer tool: required and optional parameters, including an enum.
{
"name": "get_temperature_date",
"description": "Get temperature at a location and date.",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": 'The location to get the temperature for, in the format "City, State, Country".',
},
"date": {
"type": "string",
"description": 'The date to get the temperature for, in the format "Year-Month-Day".',
},
# "unit" is optional (not listed in "required") and defaults to celsius.
"unit": {
"type": "string",
"enum": ["celsius", "fahrenheit"],
"description": 'The unit to return the temperature in. Defaults to "celsius".',
},
},
"required": ["location", "date"],
},
},
]
# Tool-calling example: the chat template advertises the tools, then we decode
# only the tokens generated after the prompt.
messages = [
{"role": "system", "content": "Sen, aşağıdaki fonksiyonlara erişimi olan yardımcı bir asistansın. Gerektiğinde bunları kullanabilirsin -"},
{"role": "user", "content": "Türkiye'de kaç il var?"}
]
text = pipe.tokenizer.apply_chat_template(messages, tools=tools, add_generation_prompt=True, tokenize=False)
inputs = pipe.tokenizer(text, return_tensors="pt").to(pipe.model.device)
outputs = pipe.model.generate(**inputs, max_new_tokens=512)
# BUGFIX: the original sliced the decoded string by `len(text)`. Decoding the
# full sequence does not always round-trip to the exact prompt string, so a
# character-based slice can cut in the wrong place. Slice at the token level
# instead, keeping only the newly generated ids.
generated_ids = outputs[0][inputs["input_ids"].shape[-1]:]
output_text = pipe.tokenizer.decode(generated_ids)
print(output_text)
# Example output:
# '<function>{"name": "get_city_count", "arguments": \'{"country_name": "Turkey"}\'}</function><|im_end|>'