nielsr HF Staff committed on
Commit b26988e · verified · 1 Parent(s): 09ae5dd

Update README.md

Files changed (1)
  1. README.md +1 -3
README.md CHANGED
@@ -72,9 +72,7 @@ sampling_params = SamplingParams(
      max_tokens=8192,
  )
  llm = LLM(model="Unbabel/Tower-Plus-9B", tensor_parallel_size=1)
- messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):
- English: Hello world!
- Portuguese (Portugal): "}]
+ messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):\nEnglish: Hello world!\nPortuguese (Portugal): "}]
  outputs = llm.chat(messages, sampling_params)
  # Make sure your prompt_token_ids look like this
  print (outputs[0].outputs[0].text)
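
For reference, the corrected lines assemble into a complete vLLM example roughly like the sketch below. This is a minimal reconstruction, not the exact README: it assumes vLLM is installed, and only `max_tokens` is visible in the hunk above, so any other `SamplingParams` arguments in the full README are omitted here.

```python
from vllm import LLM, SamplingParams

# Only max_tokens appears in the diff hunk; other sampling arguments from the
# full README (if any) are not reproduced in this sketch.
sampling_params = SamplingParams(
    max_tokens=8192,
)

llm = LLM(model="Unbabel/Tower-Plus-9B", tensor_parallel_size=1)

# The fix in this commit: the prompt is a single string with explicit \n
# escapes instead of a literal multi-line string inside the message dict.
messages = [{"role": "user", "content": "Translate the following English source text to Portuguese (Portugal):\nEnglish: Hello world!\nPortuguese (Portugal): "}]

# llm.chat applies the model's chat template to the messages before generating.
outputs = llm.chat(messages, sampling_params)
print(outputs[0].outputs[0].text)
```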