Nicolai Berk committed
Commit 5c94b0e · 1 parent: f499a63

Temporarily remove reranker

Files changed (1): app.py (+9 -9)
app.py CHANGED
@@ -48,7 +48,7 @@ index = faiss.IndexFlatL2(corpus_embeddings_np.shape[1])
 index.add(corpus_embeddings_np)
 
 # Reranker model
-reranker = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")
+# reranker = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")
 
 # Generator (choose one: local HF model or OpenAI)
 tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
@@ -68,17 +68,17 @@ def rag_pipeline(query):
     for doc in retrieved_docs:
         print("-", repr(doc))
 
-    # Rerank
-    rerank_pairs = [[str(query), str(doc)] for doc in retrieved_docs if isinstance(doc, str) and doc.strip()]
-    if not rerank_pairs:
-        return "No valid documents found to rerank."
-    scores = reranker.predict(rerank_pairs)
+    # # Rerank
+    # rerank_pairs = [[str(query), str(doc)] for doc in retrieved_docs if isinstance(doc, str) and doc.strip()]
+    # if not rerank_pairs:
+    #     return "No valid documents found to rerank."
+    # scores = reranker.predict(rerank_pairs)
 
-    scores = reranker.predict(rerank_pairs)
-    reranked_docs = [doc for _, doc in sorted(zip(scores, retrieved_docs), reverse=True)]
+    # scores = reranker.predict(rerank_pairs)
+    # reranked_docs = [doc for _, doc in sorted(zip(scores, retrieved_docs), reverse=True)]
 
     # Combine for context
-    context = "\n\n".join(reranked_docs[:2])
+    context = "\n\n".join(retrieved_docs[:2])
     prompt = f"""Answer the following question using the provided context.\n\nContext:\n{context}\n\nQuestion: {query}\nAnswer:"""
 
     # Generate
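Note for when the reranker comes back: the block removed here had two bugs. It called reranker.predict(rerank_pairs) twice, and it zipped the scores (computed on the filtered rerank_pairs) against the unfiltered retrieved_docs, so scores and documents fall out of alignment whenever a document is filtered out. A minimal corrected sketch, assuming CrossEncoder comes from sentence_transformers (the import sits outside this diff):

from sentence_transformers import CrossEncoder  # assumed import; not shown in the diff

reranker = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")

def rerank(query, retrieved_docs, top_k=2):
    # Filter first, then build pairs from the *filtered* list, so the scores
    # returned by predict() stay aligned with the documents they belong to.
    docs = [doc for doc in retrieved_docs if isinstance(doc, str) and doc.strip()]
    if not docs:
        return []
    pairs = [[str(query), doc] for doc in docs]
    scores = reranker.predict(pairs)  # one predict() call, one score per pair
    # Sort on the score alone, so ties never fall through to comparing strings.
    ranked = sorted(zip(scores, docs), key=lambda pair: pair[0], reverse=True)
    return [doc for _, doc in ranked[:top_k]]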
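If the removal really is temporary, a flag avoids commenting the block in and out. Sketched below as a suggestion, not what app.py currently does; USE_RERANKER and build_context are hypothetical names, and rerank is the helper sketched above:

USE_RERANKER = False  # hypothetical toggle, not present in app.py

reranker = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2") if USE_RERANKER else None

def build_context(query, retrieved_docs, top_k=2):
    # With the reranker disabled, fall back to retrieval order, matching what
    # this commit does with context = "\n\n".join(retrieved_docs[:2]).
    docs = rerank(query, retrieved_docs, top_k) if reranker else retrieved_docs[:top_k]
    return "\n\n".join(docs)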