Update app.py
app.py CHANGED

@@ -7,13 +7,13 @@ def greet_json():
     return {"status": "Its working built by Fayaz"}
 
 # Initialize the text generation pipeline
-
+pipe = pipeline("text2text-generation", model="google/flan-t5-small")
 # checking for ofingpt
 # ofintech/FinGPT_0.1.3
 # pipe = pipeline("text2text-generation", model="MudassirFayaz/llama-2-7b_career_0.6.0", trust_remote_code=True)
 # Initialize the text generation pipeline
 # model = AutoModelForSeq2SeqLM.from_pretrained("ofintech/FinGPT_0.1.3")
-pipe = pipeline("text2text-generation", model="MudassirFayaz/llama-2-7b_career_0.6.0", tokenizer=tokenizer)
+# pipe = pipeline("text2text-generation", model="MudassirFayaz/llama-2-7b_career_0.6.0", tokenizer=tokenizer)
 
 
 @app.get("/")
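For context, a minimal sketch of what app.py could look like after this commit. Only the lines visible in the diff (the greet_json response and the pipeline initialization) come from the source; the imports, the FastAPI app object, and the /generate endpoint are assumptions added for illustration. Note that the removed line passed a tokenizer object that does not appear anywhere in the visible hunk.

# Hypothetical reconstruction of app.py after this commit.
# Only greet_json's response and the pipeline initialization appear in the
# diff above; everything else (imports, app object, /generate) is assumed.
from fastapi import FastAPI
from transformers import pipeline

app = FastAPI()

# The change in this commit: replace the llama-2-7b career model with the
# much smaller google/flan-t5-small, keeping the old call as a comment.
pipe = pipeline("text2text-generation", model="google/flan-t5-small")


@app.get("/")
def greet_json():
    return {"status": "Its working built by Fayaz"}


# Assumed endpoint showing how the pipeline would be used: run the prompt
# through flan-t5-small and return the generated text.
@app.get("/generate")
def generate(prompt: str):
    result = pipe(prompt, max_new_tokens=128)
    return {"output": result[0]["generated_text"]}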