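Below is a simple command-line chat script built on Hugging Face Transformers and the cyberagent/open-calm-large model: it reads user input in a loop, generates a reply with `model.generate`, and prints only the newly generated text.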
```python
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Optionally silence TensorFlow log noise (only relevant if TensorFlow is installed)
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# Load the model and tokenizer
model_name = "cyberagent/open-calm-large"
# model_name = "cyberagent/open-calm-3b"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Start the chat loop
print("Bot: Hello! Do you have any questions?")
while True:
    # Read the user's input
    user_input = input("You: ")

    # Encode the input as token IDs (a single, unpadded sequence)
    input_ids = tokenizer.encode(user_input, return_tensors="pt")
    # No padding is applied, so the attention mask is simply all ones
    attention_mask = torch.ones_like(input_ids)

    # Move the inputs to the same device as the model
    input_ids = input_ids.to(model.device)
    attention_mask = attention_mask.to(model.device)

    # Generate a response with the model
    output = model.generate(
        input_ids,
        attention_mask=attention_mask,
        max_length=300,
        num_return_sequences=1,
        no_repeat_ngram_size=2,
        pad_token_id=model.config.eos_token_id,
    )

    # Decode only the newly generated tokens (everything after the prompt)
    output_text = tokenizer.decode(output[:, input_ids.shape[-1]:][0], skip_special_tokens=True)
    print("Bot: " + output_text)
```