Spaces:
Runtime error
Sanchit Gandhi committed
Commit 7776945 · 1 Parent(s): 2f18cd6
Update app.py
app.py CHANGED
@@ -18,11 +18,18 @@ Options:
 - greedy: accurate completions (may be more boring or have repetitions)
 """
 
-wip_description = """JAX / Flax
-
-
-
-
+wip_description = """Gradio Demo for JAX / Flax BLOOM. The 176B BLOOM model running on a TPU v3-256 pod, with 2D model parallelism and custom mesh axes.
+
+
+Note: rendering of the screenshot is currently not optimised. To experience the true speed of JAX / Flax, tick 'just output raw text'.
+
+Tips:
+- Do NOT talk to BLOOM as an entity, it's not a chatbot but a webpage/blog/article completion model.
+- For the best results: MIMIC a few sentences of a webpage similar to the content you want to generate.
+Start a paragraph as if YOU were writing a blog, webpage, math post, coding article and BLOOM will generate a coherent follow-up. Longer prompts usually give more interesting results.
+Options:
+- sampling: imaginative completions (may be not super accurate e.g. math/history)
+- greedy: accurate completions (may be more boring or have repetitions)
 """
 
 API_URL = os.getenv("API_URL")
@@ -33,7 +40,7 @@ examples = [
         64, "sampling", True],
     ['A poem about the beauty of science by Alfred Edgar Brittle\nTitle: The Magic Craft\nIn the old times', 64,
      "sampling", True],
-    ['استخراج العدد العاملي في لغة بايثون:', 64, "
+    ['استخراج العدد العاملي في لغة بايثون:', 64, "sampling", True],
     ["Pour déguster un ortolan, il faut tout d'abord", 64, "sampling", True],
     [
         'Traduce español de España a español de Argentina\nEl coche es rojo - el auto es rojo\nEl ordenador es nuevo - la computadora es nueva\nel boligrafo es negro -',
@@ -41,7 +48,7 @@ examples = [
     [
         'Estos ejemplos quitan vocales de las palabras\nEjemplos:\nhola - hl\nmanzana - mnzn\npapas - pps\nalacran - lcrn\npapa -',
         64, "sampling", True],
-    ["Question: If I put cheese into the fridge, will it melt?\nAnswer:", 64, "
+    ["Question: If I put cheese into the fridge, will it melt?\nAnswer:", 64, "sampling", True],
     ["Math exercise - answers:\n34+10=44\n54+20=", 64, "sampling", True],
     [
         "Question: Where does the Greek Goddess Persephone spend half of the year when she is not with her mother?\nAnswer:",
@@ -60,10 +67,9 @@ def query(payload):
 
 
 def inference(input_sentence, max_length, sample_or_greedy, raw_text=True):
-    do_sample = sample_or_greedy == "sampling"
     payload = {
         "inputs": input_sentence,
-        "do_sample":
+        "do_sample": True,
         # "max_new_tokens": max_length
     }
 
@@ -106,7 +112,7 @@ def inference(input_sentence, max_length, sample_or_greedy, raw_text=True):
         init_font_size=142,
         right_align=False,
     )
-    return img, data[0]['generated_text'][0]
+    return img, data[0]['generated_text'][0]
 
 
 gr.Interface(
@@ -114,7 +120,7 @@ gr.Interface(
     [
         gr.inputs.Textbox(label="Input"),
         gr.inputs.Radio([64], default=64, label="Tokens to generate"),
-        gr.inputs.Radio(["sampling"
+        gr.inputs.Radio(["sampling"], label="Sample or greedy", default="sampling"),
         gr.Checkbox(label="Just output raw text", value=True),
     ],
     ["image", "text"],