Update README.md
README.md CHANGED
@@ -21,7 +21,14 @@ This model is an advanced fp8 quantized version of google/gemma-3-27b-it, meticu
 
 
 
-
+# Inference with HuggingFace
+
+## Transformers library
+
+```sh
+pip install git+https://github.com/huggingface/[email protected]
+```
+
 ```python3
 
 from transformers import AutoProcessor, Gemma3ForConditionalGeneration, BitsAndBytesConfig
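The diff only shows the first line of the README's Python example, so the full inference snippet (including how `BitsAndBytesConfig` is configured) is not visible here. As a rough sketch of how the pieces typically fit together with the Transformers API, something like the following could work; the model id is a placeholder and every loading option is an assumption rather than the repository's confirmed instructions.

```python3
# Illustrative sketch only: the diff truncates the README's example, so the
# model id below is a placeholder and the loading options are assumptions.
import torch
from transformers import AutoProcessor, Gemma3ForConditionalGeneration

model_id = "your-org/gemma-3-27b-it-fp8"  # hypothetical id for the quantized checkpoint

# Load the processor (tokenizer + image processor) and the model weights.
processor = AutoProcessor.from_pretrained(model_id)
model = Gemma3ForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # compute dtype; the stored weights are fp8-quantized
    device_map="auto",
)

# Gemma 3 is a chat model, so the prompt is built with the chat template.
messages = [
    {"role": "user", "content": [{"type": "text", "text": "Explain fp8 quantization in one sentence."}]},
]
inputs = processor.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

with torch.inference_mode():
    output = model.generate(**inputs, max_new_tokens=128)

# Drop the prompt tokens and decode only the newly generated text.
print(processor.decode(output[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```

Because the checkpoint is already fp8-quantized, the sketch loads it directly with `from_pretrained`; whether the README additionally wires `BitsAndBytesConfig` into the call cannot be determined from this diff.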