Upload folder using huggingface_hub
Test_RAG.py +1 -1
Test_RAG.py
CHANGED
@@ -291,7 +291,7 @@ if llm_model_id == "red-pajama-3b-chat" and "GPU" in core.available_devices and
     ov_config["INFERENCE_PRECISION_HINT"] = "f32"

 llm = HuggingFacePipeline.from_model_id(
-    model_id= "meta-llama/Meta-Llama-3-8B"
+    model_id= "meta-llama/Meta-Llama-3-8B",
     #meta-llama/Meta-Llama-3-8B------------/meta-llama/Llama-3.2-3B-Instruct
     task="text-generation",
     backend="openvino",