Update README.md
README.md CHANGED
@@ -33,7 +33,8 @@ tokenizer = AutoTokenizer.from_pretrained(model_path, padding_side='left')
 
 en_text = 'Hi.'
 ko_text = '안녕하세요.'
-
+
+en_prompt = f"Translate this from English to Korean:\nEnglish: {en_text}\nKorean:"
 ko_prompt = f"Translate this from Korean to English:\nKorean: {ko_text}\nEnglish:"
 
 input_ids = tokenizer(en_prompt, return_tensors="pt", padding=True, max_length=256, truncation=True).input_ids.cuda()
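For context, a minimal sketch of how the README example reads after this change. The diff only shows the tokenizer setup, the prompt strings, and the tokenization call; the model loading, generation, and decoding steps below are assumptions, and `model_path` is a placeholder for whatever checkpoint the README names.

```python
# Sketch of the README example after this change (assumed scaffolding marked below).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "path/to/checkpoint"  # hypothetical placeholder; use the model named in the README
tokenizer = AutoTokenizer.from_pretrained(model_path, padding_side='left')
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloat16).cuda()  # assumed, not shown in the diff

en_text = 'Hi.'
ko_text = '안녕하세요.'

# The newly added English-to-Korean prompt, alongside the existing Korean-to-English one.
en_prompt = f"Translate this from English to Korean:\nEnglish: {en_text}\nKorean:"
ko_prompt = f"Translate this from Korean to English:\nKorean: {ko_text}\nEnglish:"

input_ids = tokenizer(en_prompt, return_tensors="pt", padding=True, max_length=256, truncation=True).input_ids.cuda()

# Assumed generation/decoding step, not part of the diff shown above.
outputs = model.generate(input_ids=input_ids, max_new_tokens=64)
print(tokenizer.decode(outputs[0][input_ids.shape[1]:], skip_special_tokens=True))
```

Before this commit, `en_prompt` was used in the tokenizer call without ever being defined; the added line makes the snippet self-contained.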