from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T"  # Or local path if downloaded

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Example: Use a line from your cleaned file as a prompt
with open("skin_disease_articles_clean.txt", "r", encoding="utf-8") as f:
    prompt = f.readline().strip()

inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=100)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
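
# Optional sketch: the call above uses greedy decoding (the generate() default),
# which can produce repetitive continuations. If that happens, sampling is one
# alternative; the parameter values below (temperature, top_p) are illustrative
# assumptions, not tuned recommendations.
sampled = model.generate(
    **inputs,
    max_new_tokens=100,
    do_sample=True,      # sample from the distribution instead of greedy argmax
    temperature=0.7,     # lower = more conservative, higher = more varied
    top_p=0.9,           # nucleus sampling: restrict to the top 90% probability mass
)
print(tokenizer.decode(sampled[0], skip_special_tokens=True))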