4PHealth committed
Commit 2caea52 · 1 Parent(s): 89257e1

Update app.py

Files changed (1)
  1. app.py +17 -10
app.py CHANGED
@@ -1,32 +1,39 @@
-from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
+from llama_index import SimpleDirectoryReader, GPTListIndex, GPTVectorStoreIndex, StorageContext, LLMPredictor, PromptHelper, load_index_from_storage
 from langchain.chat_models import ChatOpenAI
 import gradio as gr
 import sys
 import os
 
-os.environ["OPENAI_API_KEY"] = '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'
+os.environ["OPENAI_API_KEY"] = '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'
 
 def construct_index(directory_path):
     max_input_size = 1000000
-    num_outputs = 456
+    num_outputs = 256
     max_chunk_overlap = 20
-    chunk_size_limit = 6048
+    chunk_size_limit = 2048
 
-    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
+    prompt_helper = PromptHelper(max_input_size, num_outputs, chunk_overlap_ratio= 0.1, chunk_size_limit=chunk_size_limit)
 
-    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
+    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=1, model_name="gpt-4", max_tokens=num_outputs))
 
     documents = SimpleDirectoryReader(directory_path).load_data()
 
-    index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper,chunk_size_limit = 6048)
+    index = GPTVectorStoreIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper,chunk_size_limit = 2048)
 
-    index.save_to_disk('index.json')
+
+    index.storage_context.persist(persist_dir="index.json")
 
     return index
 
 def chatbot(input_text):
-    index = GPTSimpleVectorIndex.load_from_disk('index.json')
-    response = index.query(input_text, response_mode="compact")
+
+    storage_context = StorageContext.from_defaults(persist_dir="index.json")
+    index = load_index_from_storage(storage_context)
+
+    query_engine = index.as_query_engine()
+    response = query_engine.query(input_text)
+
+
     return response.response
 
 iface = gr.Interface(fn=chatbot,
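For context, this commit migrates the app from the old gpt_index package to llama_index: save_to_disk/load_from_disk and index.query are replaced by the storage-context API and a query engine. A minimal sketch of that persist-and-reload flow, assuming a llama_index 0.6.x install and an illustrative "docs" folder (the folder name and query string are not from this repo):

from llama_index import (
    GPTVectorStoreIndex,
    SimpleDirectoryReader,
    StorageContext,
    load_index_from_storage,
)

# Build an index from a folder of documents and persist it to disk.
documents = SimpleDirectoryReader("docs").load_data()
index = GPTVectorStoreIndex.from_documents(documents)
index.storage_context.persist(persist_dir="index.json")

# Reload the persisted index (e.g. on each chatbot call) and query it.
storage_context = StorageContext.from_defaults(persist_dir="index.json")
index = load_index_from_storage(storage_context)
response = index.as_query_engine().query("What do these documents cover?")
print(response.response)

Note that in this API persist_dir is treated as a directory, so a name like "index.json" ends up as a folder holding the docstore, index store and vector store JSON files rather than a single file as with the old save_to_disk('index.json').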