Respond with Special Tokens
main.py
CHANGED
@@ -80,7 +80,7 @@ async def generate_text(item: Item):
         pad_token_id=tokenizer.eos_token_id  # Set this to suppress warning
     )

-    resp = tokenizer.decode(outputs[0], skip_special_tokens=True)
+    resp = tokenizer.decode(outputs[0], skip_special_tokens=False)


     return {"response": resp}
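For reference, a minimal sketch (separate from this commit) of what flipping skip_special_tokens does to tokenizer.decode output; the gpt2 tokenizer below is an assumed stand-in, not necessarily the model this Space loads.

# Minimal sketch, not part of the commit: effect of skip_special_tokens
# on tokenizer.decode. "gpt2" is only an assumed example tokenizer.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")

# Token ids for "Hello" followed by the end-of-sequence token
ids = tokenizer.encode("Hello") + [tokenizer.eos_token_id]

# skip_special_tokens=True strips markers such as <|endoftext|>
print(tokenizer.decode(ids, skip_special_tokens=True))   # Hello

# skip_special_tokens=False keeps them, which is what this commit switches to
# so the API response includes the special tokens
print(tokenizer.decode(ids, skip_special_tokens=False))  # Hello<|endoftext|>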