Update model.py
model.py CHANGED
@@ -1,6 +1,8 @@
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import os
-
+import torch
+if torch.cuda.is_available():
+    model.to('cuda')
 def modelFeedback(ats_score, resume_data, job_description):
     """
     Generate ATS feedback by utilizing a pre-configured pipeline.
@@ -50,7 +52,6 @@ def modelFeedback(ats_score, resume_data, job_description):
         input_ids,
         max_length=1500,
         temperature=0.01,
-        top_p=0.7,
         pad_token_id=tokenizer.eos_token_id  # Ensure padding works properly
     )
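For context, here is a minimal sketch of the GPU-placement pattern this commit introduces: move the model to CUDA when a GPU is available, then run generation with the parameters shown in the diff. The checkpoint name and the prompt construction are assumptions, since the diff does not show where model and tokenizer are created; note that model.to('cuda') only works once the model object exists.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hypothetical checkpoint; the actual model used by this Space is not shown in the diff.
MODEL_NAME = "gpt2"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)

# Move the model to the GPU when one is available (the pattern added in this commit).
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

def modelFeedback(ats_score, resume_data, job_description):
    """Generate ATS feedback by utilizing a pre-configured pipeline."""
    # Prompt construction is an assumption; the real prompt lives in the unchanged
    # part of model.py that this diff does not show.
    prompt = (
        f"ATS score: {ats_score}\n"
        f"Resume: {resume_data}\n"
        f"Job description: {job_description}\n"
        "Provide feedback on how to improve this resume."
    )
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device)
    output_ids = model.generate(
        input_ids,
        max_length=1500,
        temperature=0.01,
        pad_token_id=tokenizer.eos_token_id,  # Ensure padding works properly
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)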