NeoZ123 committed (verified) · Commit d0a9d87 · Parent: 35afe79

Update README.md

Files changed (1): README.md (+2 -2)
README.md CHANGED
@@ -26,7 +26,7 @@ A simple demo for deployment of the model:
 import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-model_path = "THUDM/LongReward-glm4-9b-SFT"
+model_path = "NeoZ123/LongReward-glm4-9b-SFT"
 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map='auto')
 context = '''
@@ -43,7 +43,7 @@ You can also deploy the model with [vllm](https://github.com/vllm-project/vllm)
 import torch
 from vllm import LLM, SamplingParams
 
-model_path = "THUDM/LongReward-glm4-9b-SFT"
+model_path = "NeoZ123/LongReward-glm4-9b-SFT"
 model = LLM(
     model= model_path,
     dtype=torch.bfloat16,
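
For reference, a minimal end-to-end sketch of the transformers demo under the updated repository id. The prompt, chat-template usage, and generation settings below are illustrative assumptions, not part of the README diff shown above:

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Updated repository id from this commit
model_path = "NeoZ123/LongReward-glm4-9b-SFT"

tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

# Illustrative prompt; the README's full demo builds a long-context prompt instead.
messages = [{"role": "user", "content": "Summarize the key idea of LongReward in one sentence."}]
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_tensors="pt",
    return_dict=True,
).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=128)
# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```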