MyEnny committed
Commit e4ad132 · verified · 1 Parent(s): d4f0d5c

Upload app.py

Files changed (1)
app.py +4 -4
app.py CHANGED
@@ -66,7 +66,7 @@ llm = HuggingFacePipeline(pipeline=pipe)
 # --- Step 7: Setup memory and QA chain ---
 memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
 
-prompt = PromptTemplate.from_template("""
+custom_prompt = PromptTemplate.from_template("""
 You are a helpful assistant at the University of Hertfordshire. Use the context below to answer the question clearly and factually.
 If the answer is not in the context, say you don't know.
 
@@ -76,15 +76,15 @@ Context:
 Question:
 {question}
 
-Helpful Answer:
+Answer:
 """)
 
 qa_chain = ConversationalRetrievalChain.from_llm(
     llm=llm,
     retriever=vectordb.as_retriever(search_kwargs={"k": 3}),
     memory=memory,
-    chain_type="map_reduce",
-    combine_docs_chain_kwargs={"combine_prompt": prompt}
+    chain_type="stuff",
+    combine_docs_chain_kwargs={"prompt": custom_prompt}
 )
 
 # --- Step 8: Define chatbot logic ---
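
The switch from "map_reduce" to "stuff" matters because the two document chains take different prompt keyword arguments and different template variables: the stuff chain receives its template under the "prompt" key and fills {context} with the retrieved documents in a single pass, while map_reduce's combine step is configured via "combine_prompt" and expects a {summaries} variable, so the old template's {context} placeholder never lined up. Below is a minimal sketch of how the pieces fit together after this commit, assuming the classic (pre-LCEL) LangChain API and that `llm` and `vectordb` are built earlier in app.py as the surrounding hunks indicate; the template text is abbreviated and the final invocation is hypothetical, since Step 8 is not part of this diff.

# Sketch of the resulting Step 7 setup (classic LangChain API; `llm` and
# `vectordb` are assumed to be created earlier in app.py, as in the diff above).
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

# The "stuff" chain fills {context} with all retrieved documents at once.
custom_prompt = PromptTemplate.from_template(
    "Use the context below to answer the question. If the answer is not in "
    "the context, say you don't know.\n\n"
    "Context:\n{context}\n\nQuestion:\n{question}\n\nAnswer:"
)

qa_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=vectordb.as_retriever(search_kwargs={"k": 3}),
    memory=memory,
    chain_type="stuff",
    combine_docs_chain_kwargs={"prompt": custom_prompt},  # "prompt" is the key the stuff chain accepts
)

# Hypothetical usage (the actual Step 8 handler is not shown in this diff):
result = qa_chain({"question": "What postgraduate courses are offered?"})
print(result["answer"])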