remove trust remote code
README.md CHANGED

````diff
@@ -26,7 +26,7 @@ from zipnn import zipnn_hf
 
 zipnn_hf()
 
-pipe = pipeline("text-generation", model="royleibov/Jamba-v0.1-ZipNN-Compressed", trust_remote_code=True)
+pipe = pipeline("text-generation", model="royleibov/Jamba-v0.1-ZipNN-Compressed")
 ```
 ```python
 # Load model directly
@@ -35,8 +35,8 @@ from zipnn import zipnn_hf
 
 zipnn_hf()
 
-tokenizer = AutoTokenizer.from_pretrained("royleibov/Jamba-v0.1-ZipNN-Compressed", trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained("royleibov/Jamba-v0.1-ZipNN-Compressed", trust_remote_code=True)
+tokenizer = AutoTokenizer.from_pretrained("royleibov/Jamba-v0.1-ZipNN-Compressed")
+model = AutoModelForCausalLM.from_pretrained("royleibov/Jamba-v0.1-ZipNN-Compressed")
 ```
 ### ZipNN
 ZipNN also allows you to seamlessly save local disk space in your cache after the model is downloaded.
````
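For context, a minimal end-to-end sketch of the updated usage on the "+" side of the diff. The hunks only show the `from zipnn import zipnn_hf` header line, so the standard `transformers` imports below are assumed; the model ID and the `zipnn_hf()` / `pipeline` / `AutoModelForCausalLM` calls are taken directly from the diff.

```python
# Sketch of the README usage after this change (assumes transformers and zipnn are installed).
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from zipnn import zipnn_hf

# Enable transparent handling of ZipNN-compressed weights when loading from the Hub,
# as described in the README this diff modifies.
zipnn_hf()

# Option 1: high-level pipeline, now without trust_remote_code=True.
pipe = pipeline("text-generation", model="royleibov/Jamba-v0.1-ZipNN-Compressed")

# Option 2: load the model directly, also without trust_remote_code=True.
tokenizer = AutoTokenizer.from_pretrained("royleibov/Jamba-v0.1-ZipNN-Compressed")
model = AutoModelForCausalLM.from_pretrained("royleibov/Jamba-v0.1-ZipNN-Compressed")
```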