Commit 81f75af
Ilyas KHIAT committed
1 Parent(s): 3b29c80
paste

Files changed:
- RAG_PDF_WEB.py +3 -3
- chat_te.py +3 -3
RAG_PDF_WEB.py CHANGED
@@ -50,17 +50,17 @@ def get_doc_chunks(docs):
     return docs
 
 def get_vectorstore_from_docs(doc_chunks):
-    embedding = OpenAIEmbeddings(model="text-embedding-3-
+    embedding = OpenAIEmbeddings(model="text-embedding-3-small")
     vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
     return vectorstore
 
 def get_vectorstore(text_chunks):
-    embedding = OpenAIEmbeddings(model="text-embedding-3-
+    embedding = OpenAIEmbeddings(model="text-embedding-3-small")
     vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embedding)
     return vectorstore
 
 def get_conversation_chain(vectorstore):
-    llm = ChatOpenAI(model="gpt-
+    llm = ChatOpenAI(model="gpt-3.5-turbo",temperature=0.5, max_tokens=2048)
     retriever=vectorstore.as_retriever()
     prompt = hub.pull("rlm/rag-prompt")
 
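For context, a minimal sketch of how the helpers touched in RAG_PDF_WEB.py could be wired into a working retrieval chain. The imports, the format_docs helper, the build_rag_chain name, and the LCEL composition are assumptions for illustration; the commit itself only shows the llm, retriever, and prompt lines of get_conversation_chain.

# Sketch only: assumes langchain, langchainhub, langchain-openai, langchain-community
# and faiss-cpu are installed and OPENAI_API_KEY is set. Names not in the diff
# (format_docs, build_rag_chain) are illustrative.
from langchain import hub
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

def get_vectorstore(text_chunks):
    # Same embedding model the commit pins: text-embedding-3-small.
    embedding = OpenAIEmbeddings(model="text-embedding-3-small")
    return FAISS.from_texts(texts=text_chunks, embedding=embedding)

def format_docs(docs):
    # Join the retrieved Documents into a single context string for the prompt.
    return "\n\n".join(doc.page_content for doc in docs)

def build_rag_chain(vectorstore):
    # Mirrors the pieces visible in get_conversation_chain: gpt-3.5-turbo,
    # the vectorstore retriever, and the rlm/rag-prompt pulled from the hub.
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, max_tokens=2048)
    retriever = vectorstore.as_retriever()
    prompt = hub.pull("rlm/rag-prompt")
    return (
        {"context": retriever | format_docs, "question": RunnablePassthrough()}
        | prompt
        | llm
        | StrOutputParser()
    )

if __name__ == "__main__":
    store = get_vectorstore([
        "FAISS keeps the embedded chunks and answers similarity queries.",
        "The retriever feeds the most similar chunks to the prompt.",
    ])
    chain = build_rag_chain(store)
    print(chain.invoke("What does FAISS keep?"))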
chat_te.py CHANGED
@@ -20,12 +20,12 @@ def get_docs_from_pdf(file):
     return docs
 
 def get_doc_chunks(docs):
-    text_splitter = SemanticChunker(OpenAIEmbeddings(model="text-embedding-3-
+    text_splitter = SemanticChunker(OpenAIEmbeddings(model="text-embedding-3-small"))
     chunks = text_splitter.split_documents(docs)
     return chunks
 
 def get_vectorstore_from_docs(doc_chunks):
-    embedding = OpenAIEmbeddings(model="text-embedding-3-
+    embedding = OpenAIEmbeddings(model="text-embedding-3-small")
     vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
     return vectorstore
 
@@ -47,7 +47,7 @@ def create_db(file):
     # docs = get_docs_from_pdf(file)
     # doc_chunks = get_doc_chunks(docs)
     # vectorstore = get_vectorstore_from_docs(doc_chunks)
-    vectorstore = FAISS.load_local(file, OpenAIEmbeddings(model="text-embedding-3-
+    vectorstore = FAISS.load_local(file, OpenAIEmbeddings(model="text-embedding-3-small"),allow_dangerous_deserialization= True)
     return vectorstore
 
 def get_response(chain,user_query, chat_history):
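And a short sketch of the chat_te.py side of the change: create_db now loads a previously saved FAISS index with allow_dangerous_deserialization=True, which is required because load_local unpickles the stored docstore. The "faiss_index" folder, the sample texts, and the explicit save step below are assumptions for illustration; the commit only shows the load side.

# Sketch only: the "faiss_index" folder and the save step are illustrative.
from langchain_community.vectorstores import FAISS
from langchain_experimental.text_splitter import SemanticChunker
from langchain_openai import OpenAIEmbeddings

embedding = OpenAIEmbeddings(model="text-embedding-3-small")

# Build and persist an index once, mirroring get_doc_chunks + get_vectorstore_from_docs.
splitter = SemanticChunker(embedding)
docs = splitter.create_documents([
    "Some source text worth indexing.",
    "Another passage that should be retrievable later.",
])
FAISS.from_documents(documents=docs, embedding=embedding).save_local("faiss_index")

# Load it back the way create_db now does. Only enable allow_dangerous_deserialization
# for index folders you created yourself or otherwise trust, since loading unpickles data.
vectorstore = FAISS.load_local(
    "faiss_index", embedding, allow_dangerous_deserialization=True
)
print(vectorstore.similarity_search("retrievable passage", k=1))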