Saif Rehman Nasir
committed on
Commit 80a09fa · 1 Parent(s): 5abd48d
Add llama tokenizer
app.py
CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import os
 from rag import local_retriever, global_retriever
+from transformers import LlamaTokenizer
 
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
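
The app.py hunk only pulls the tokenizer into scope; none of the lines shown here use it yet. A minimal sketch of the kind of prompt-length helper such an import enables is below. The helper name is hypothetical, and AutoTokenizer is substituted for LlamaTokenizer because Llama 3 repositories ship a fast BPE tokenizer (tokenizer.json) rather than the SentencePiece file the slow LlamaTokenizer expects; access to the gated meta-llama checkpoint is also assumed.

from transformers import AutoTokenizer

# Illustrative sketch: load the tokenizer once at startup so request handlers
# can reuse it. Requires access to the gated
# meta-llama/Meta-Llama-3-8B-Instruct repository (e.g. via HF_TOKEN).
_tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")

def count_tokens(text: str) -> int:
    # encode() returns token IDs, including special tokens by default.
    return len(_tokenizer.encode(text))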
rag.py
CHANGED
@@ -14,7 +14,7 @@ from langchain_huggingface import HuggingFaceEndpoint
 
 from typing import Dict, Any
 from tqdm import tqdm
-import
+from transformers import LlamaTokenizer
 
 NEO4J_URI = os.getenv("NEO4J_URI")
 NEO4J_USERNAME = os.getenv("NEO4J_USERNAME")
@@ -274,8 +274,10 @@ def global_retriever(query: str, level: int, response_type: str):
     i += 1
 
     ###Debug####
-
-
+    global_tokenizer = LlamaTokenizer.from_pretrained(
+        "meta-llama/Meta-Llama-3-8B-Instruct"
+    )
+    tokens = global_tokenizer.encode(intermediate_results)
     print(f"Number of input tokens: {len(tokens)}")
     ###Debug###
     final_response = reduce_chain.invoke(
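
The debug block added to global_retriever only reports how many tokens the concatenated map-step output (intermediate_results) would feed into the reduce chain. A natural follow-up, sketched below, is to guard the reduce prompt against overflowing the model's context window; the 8192-token budget matches Meta-Llama-3-8B-Instruct, but the constant name and truncation policy are assumptions, not part of this commit, and the snippet assumes global_tokenizer and intermediate_results from the diff are in scope.

# Assumption: global_tokenizer and intermediate_results come from the diff above.
MAX_CONTEXT_TOKENS = 8192  # context window of Meta-Llama-3-8B-Instruct

tokens = global_tokenizer.encode(intermediate_results)
print(f"Number of input tokens: {len(tokens)}")
if len(tokens) > MAX_CONTEXT_TOKENS:
    # Assumed policy: keep only the most recent tokens so the reduce prompt
    # stays within the model's budget.
    intermediate_results = global_tokenizer.decode(
        tokens[-MAX_CONTEXT_TOKENS:], skip_special_tokens=True
    )

As a design note, calling from_pretrained inside global_retriever on every query is relatively expensive; hoisting the tokenizer construction to module level, next to the new import, would avoid repeated loads.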