Update README.md
Browse files
README.md
CHANGED
@@ -63,6 +63,11 @@ base_model:
|
|
63 |
|
64 |
아래 코드를 사용하여 모델 추론을 시작할 수 있습니다.
|
65 |
|
|
|
|
|
|
|
|
|
|
|
66 |
```python
|
67 |
import torch
|
68 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
@@ -87,8 +92,7 @@ messages = [{'role': 'user', 'content': input_text}]
|
|
87 |
chat_input = tokenizer.apply_chat_template(
|
88 |
messages,
|
89 |
add_generation_prompt=True,
|
90 |
-
tokenize=False
|
91 |
-
enable_thinking=False # 'enable_thinking' 파라미터가 없을 경우 이 줄을 제거하세요.
|
92 |
)
|
93 |
|
94 |
# 모델 입력 생성
|
|
|
63 |
|
64 |
아래 코드를 사용하여 모델 추론을 시작할 수 있습니다.
|
65 |
|
66 |
+
**이 모델은 `transformers` 라이브러리 버전 4.51.3 이상이 필요합니다. 원활한 사용을 위해 라이브러리 버전을 확인하고 필요시 업그레이드해 주세요.**
|
67 |
+
```python
|
68 |
+
!pip install "transformers>=4.51.3"
|
69 |
+
```
|
70 |
+
|
71 |
```python
|
72 |
import torch
|
73 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
|
92 |
chat_input = tokenizer.apply_chat_template(
|
93 |
messages,
|
94 |
add_generation_prompt=True,
|
95 |
+
tokenize=False
|
|
|
96 |
)
|
97 |
|
98 |
# 모델 입력 생성
|