beshiribrahim committed on
Commit
4752b9d
·
verified ·
1 Parent(s): 0976636

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +38 -1
README.md CHANGED
@@ -4,4 +4,41 @@ language:
4
  - tig
5
  base_model:
6
  - meta-llama/Llama-3.2-1B
7
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  - tig
5
  base_model:
6
  - meta-llama/Llama-3.2-1B
7
+ ---
8
+
9
"""Demo: generate Tigre, Arabic, and English text with the Tigre Llama-3.2-1B model."""

import logging

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tig_model_path = "BeitTigreAI/tigre-llm-Llama3.2-1B"

# Suppress transformers logging BEFORE loading the model so that load-time
# warnings are hidden too (the original set this after loading, too late).
logging.getLogger("transformers").setLevel(logging.ERROR)

# Load the tokenizer and model. device_map="auto" lets accelerate decide the
# device placement; the original additionally called model.to(device), which
# conflicts with the dispatched placement and can raise — removed here.
tokenizer = AutoTokenizer.from_pretrained(tig_model_path)
model = AutoModelForCausalLM.from_pretrained(tig_model_path, device_map="auto")


def generate(prompt: str, max_new_tokens: int) -> str:
    """Generate a continuation of *prompt* and return only the new text.

    Args:
        prompt: Input text, optionally prefixed with a language tag such as
            "[tig_Ethi]" or "[eng_Latn]".
        max_new_tokens: Maximum number of tokens to generate.

    Returns:
        The decoded continuation, without the prompt or special tokens.
    """
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Slice off the prompt tokens so only newly generated text is decoded;
    # skip_special_tokens keeps EOS/pad markers out of the printed output.
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)


# Example 1: Generate text in Tigre (written in Ethiopic script)
print("Tigre Output:")
print(generate("[tig_Ethi]መርሐበ ብኩም", max_new_tokens=50))

# Example 2: Generate text in Arabic
print("\nArabic Output:")
print(generate("ما الذي يميز لغة التغري؟", max_new_tokens=40))

# Example 3: Generate text in English
print("\nEnglish Output:")
print(generate("[eng_Latn] What is interesting about the Tigre language?", max_new_tokens=40))