Laiba-Huggingface committed
Commit 340e2da (verified) · 1 parent: d39bcf3

Update app.py

Files changed (1)
  1. app.py +19 -8
app.py CHANGED
@@ -6,26 +6,37 @@ from transformers import MarianMTModel, MarianTokenizer
  import os
  os.system("pip install sentencepiece")

- # Load models and tokenizers
- model_en_to_ur = MarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-en-ur")
+ # Check if GPU is available and use it
+ device = "cuda" if torch.cuda.is_available() else "cpu"
+
+ # Load models and tokenizers once (globally)
+ model_en_to_ur = MarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-en-ur").to(device)
  tokenizer_en_to_ur = MarianTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-ur")

- model_ur_to_en = MarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-ur-en")
+ model_ur_to_en = MarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-ur-en").to(device)
  tokenizer_ur_to_en = MarianTokenizer.from_pretrained("Helsinki-NLP/opus-mt-ur-en")

+ # Apply torch.compile() for optimization (if using PyTorch 2.0+)
+ if torch.__version__ >= "2.0":
+     model_en_to_ur = torch.compile(model_en_to_ur)
+     model_ur_to_en = torch.compile(model_ur_to_en)
+
  # Function to translate text
  def translate(text, direction):
+     if not text.strip():
+         return "Please enter some text to translate."
+
      if direction == "English to Urdu":
          tokenizer, model = tokenizer_en_to_ur, model_en_to_ur
      else:
          tokenizer, model = tokenizer_ur_to_en, model_ur_to_en

-     # Tokenize input text
-     inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
+     # Tokenize input text (optimized padding)
+     inputs = tokenizer(text, return_tensors="pt", padding="longest", truncation=True).to(device)

      # Generate translation
      with torch.no_grad():
-         translated = model.generate(**inputs)
+         translated = model.generate(**inputs, max_length=512)

      # Decode output text
      output = tokenizer.decode(translated[0], skip_special_tokens=True)
@@ -39,8 +50,8 @@ interface = gr.Interface(
          gr.Radio(["English to Urdu", "Urdu to English"], label="Translation Direction", value="English to Urdu"),
      ],
      outputs=gr.Textbox(label="Translated Text"),
-     title="English ↔ Urdu Translator",
-     description="Translate text between English and Urdu using a neural machine translation model.",
+     title="⚡ Fast English ↔ Urdu Translator",
+     description="Translate text between English and Urdu quickly using a neural machine translation model with GPU acceleration.",
  )

  # Launch the Gradio app
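
A side note on the new torch.compile() guard: the expression torch.__version__ >= "2.0" compares version strings lexicographically, which happens to hold for today's 1.x/2.x releases but is not a reliable version check. Below is a minimal sketch of an alternative that probes for the torch.compile attribute instead; it assumes the same checkpoints and device selection as app.py and is not part of this commit.

import torch
from transformers import MarianMTModel

# Same model/device setup as app.py (assumption: identical checkpoints).
device = "cuda" if torch.cuda.is_available() else "cpu"
model_en_to_ur = MarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-en-ur").to(device)
model_ur_to_en = MarianMTModel.from_pretrained("Helsinki-NLP/opus-mt-ur-en").to(device)

# torch.compile() first shipped in PyTorch 2.0, so checking for the attribute
# avoids string-ordering surprises (e.g. a hypothetical "10.0" sorts before "2.0").
if hasattr(torch, "compile"):
    model_en_to_ur = torch.compile(model_en_to_ur)
    model_ur_to_en = torch.compile(model_ur_to_en)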