Update app.py
app.py CHANGED
@@ -7,17 +7,21 @@ from markdownify import markdownify
 
 models = {
     "jinaai/reader-lm-0.5b": AutoModelForCausalLM.from_pretrained("jinaai/reader-lm-0.5b", trust_remote_code=True).eval().to("cuda"),
-    "jinaai/reader-lm-1.5b": AutoModelForCausalLM.from_pretrained("jinaai/reader-lm-1.5b", trust_remote_code=True).eval().to("cuda")
+    "jinaai/reader-lm-1.5b": AutoModelForCausalLM.from_pretrained("jinaai/reader-lm-1.5b", trust_remote_code=True).eval().to("cuda"),
+    "jinaai/ReaderLM-v2": AutoModelForCausalLM.from_pretrained("jinaai/ReaderLM-v2", trust_remote_code=True).eval().to("cuda")
+
 }
 
 tokenizers = {
     "jinaai/reader-lm-0.5b": AutoTokenizer.from_pretrained("jinaai/reader-lm-0.5b", trust_remote_code=True),
     "jinaai/reader-lm-1.5b": AutoTokenizer.from_pretrained("jinaai/reader-lm-1.5b", trust_remote_code=True),
+    "jinaai/ReaderLM-v2": AutoTokenizer.from_pretrained("jinaai/ReaderLM-v2", trust_remote_code=True),
+
 }
 
 
 @spaces.GPU
-def run_example(html_content, model_id="jinaai/
+def run_example(html_content, model_id="jinaai/ReaderLM-v2"):
     print("Start Model Processing")
     model = models[model_id]
     tokenizer = tokenizers[model_id]
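
The hunk ends just after run_example looks up the selected model and tokenizer. As a rough sketch of how such a Reader-LM inference function is typically completed, the continuation below is an assumption, not part of this commit: the chat-template wrapping, the max_new_tokens budget, and the decoding flags are illustrative only, and it reuses the models and tokenizers dictionaries defined in the hunk above (the @spaces.GPU decorator from the diff is omitted here for brevity).

# Hedged sketch only — not taken from this Space's app.py.
# Assumes the `models` and `tokenizers` dictionaries defined above.
def run_example(html_content, model_id="jinaai/ReaderLM-v2"):
    print("Start Model Processing")
    model = models[model_id]
    tokenizer = tokenizers[model_id]

    # Reader-LM checkpoints are chat-tuned, so wrap the raw HTML as a user message.
    messages = [{"role": "user", "content": html_content}]
    prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=1024,  # illustrative output budget
        do_sample=False,      # deterministic decoding for reproducible HTML-to-Markdown conversion
    )

    # Strip the prompt tokens and decode only the newly generated Markdown.
    generated = outputs[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(generated, skip_special_tokens=True)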