Update app.py
app.py CHANGED

@@ -17,7 +17,7 @@ from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 from htmlTemplates import css, bot_template, user_template
 from langchain.llms import HuggingFaceHub
-from huggingface_hub import
+from huggingface_hub import InferenceApi
 
 
 def get_pdf_text(pdf_docs):
@@ -106,7 +106,7 @@ def get_conversation_chain(vectorstore):
         A conversational retrieval chain for generating responses.
 
     """
-    llm =
+    llm = InferenceApi(
         repo_id="mistralai/Mixtral-8x22B-Instruct-v0.1",
         model_kwargs={"temperature": 0.5, "max_length": 1048},
     )
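
For context, a minimal sketch of how get_conversation_chain is typically assembled from the imports visible in the hunk headers (ConversationBufferMemory, ConversationalRetrievalChain, HuggingFaceHub). The memory setup and retriever call are assumptions, not part of this commit, and the sketch uses the LangChain HuggingFaceHub wrapper rather than the raw huggingface_hub InferenceApi client, since ConversationalRetrievalChain.from_llm expects a LangChain-compatible LLM:

from langchain.chains import ConversationalRetrievalChain
from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferMemory


def get_conversation_chain(vectorstore):
    # Hypothetical wiring, shown only to illustrate the pieces the diff touches.
    # HuggingFaceHub reads HUGGINGFACEHUB_API_TOKEN from the environment.
    llm = HuggingFaceHub(
        repo_id="mistralai/Mixtral-8x22B-Instruct-v0.1",
        model_kwargs={"temperature": 0.5, "max_length": 1048},
    )
    # Chat-history buffer stored under the key ConversationalRetrievalChain expects.
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )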