gdnjr5233-YOLOer commited on
Commit
5184e02
·
verified ·
1 Parent(s): b1dd069

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +167 -0
app.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import networkx as nx
3
+ import matplotlib.pyplot as plt
4
+ import time
5
+ import random
6
+
7
+ # 1 Dynamic Query Decomposition and Visualization (动态查询分解和可视化)
8
def decompose_query(query):
    """Simulate MindSearch-style query decomposition.

    Returns a directed acyclic graph describing the decomposition flow and a
    mapping of node name -> human-readable explanation.  The structure is a
    fixed mock: the input *query* is accepted but not inspected.
    """
    graph = nx.DiGraph()
    decomposition_edges = [
        ("Main Query", "Sub-question 1"),
        ("Main Query", "Sub-question 2"),
        ("Sub-question 1", "Sub-question 1.1"),
        ("Sub-question 1", "Sub-question 1.2"),
        ("Sub-question 2", "Sub-question 2.1"),
    ]
    graph.add_edges_from(decomposition_edges)

    node_explanations = {
        "Main Query": "The main query to decompose.",
        "Sub-question 1": "First major component of the query.",
        "Sub-question 2": "Second major component of the query.",
        "Sub-question 1.1": "Detail of Sub-question 1.",
        "Sub-question 1.2": "Another aspect of Sub-question 1.",
        "Sub-question 2.1": "Detail of Sub-question 2.",
    }
    return graph, node_explanations
27
+
28
st.title("Dynamic Query Decomposition and Visualization")

# Unique widget key avoids duplicate-widget-ID errors with the app's other text inputs.
query_1 = st.text_input("Enter your query for Query Decomposition:", key="query_1")
if query_1:
    st.subheader("Query Decomposition")
    dag, explanations = decompose_query(query_1)

    # Visualize the DAG.  Pass an explicit Figure to st.pyplot — passing the
    # global pyplot module is deprecated in Streamlit — and close the figure
    # afterwards so figures do not accumulate across script reruns.
    fig = plt.figure(figsize=(10, 6))
    pos = nx.spring_layout(dag)
    nx.draw(dag, pos, with_labels=True, node_color="lightblue", node_size=3000, font_size=10, font_weight="bold")
    st.pyplot(fig)
    plt.close(fig)

    # Display the explanation attached to each DAG node.
    st.subheader("Explanations for Sub-queries")
    for node, explanation in explanations.items():
        st.write(f"**{node}:** {explanation}")
46
+
47
+ # 2 Search Result Summarization and Comparison (搜寻结果摘要及比较)
48
def fetch_results(query, source):
    """Mock a search-API call: return two canned results for *source*/*query*."""
    result_titles = (f"{source} Result 1", f"{source} Result 2")
    return [
        {"title": title, "snippet": f"{source} snippet for {query}."}
        for title in result_titles
    ]
54
+
55
def summarize_results(results):
    """Join every result's snippet into a single space-separated summary."""
    return " ".join(entry["snippet"] for entry in results)
57
+
58
st.title("Search Result Summarization and Comparison")

# Unique widget key avoids duplicate-widget-ID errors with the app's other text inputs.
query_2 = st.text_input("Enter your query for Result Summarization:", key="query_2")
if query_2:
    st.subheader("Fetching Results...")
    bing_results = fetch_results(query_2, "Bing")
    google_results = fetch_results(query_2, "Google")

    st.subheader("Results Comparison")
    col1, col2 = st.columns(2)
    # Render both engines side by side with identical formatting.
    for column, header, engine_results in (
        (col1, "**Bing Results:**", bing_results),
        (col2, "**Google Results:**", google_results),
    ):
        with column:
            st.write(header)
            for entry in engine_results:
                st.write(f"- {entry['title']}: {entry['snippet']}")
            st.write("**Summary:**", summarize_results(engine_results))
79
+
80
+
81
+ # 3 Search Engine Efficiency Test (搜索引擎效率测试)
82
def mock_parallel_search(query):
    """Pretend to query a search engine: sleep a random latency, return a page count.

    The *query* argument is accepted for interface parity but not inspected.
    """
    latency = random.uniform(0.5, 1.0)
    time.sleep(latency)  # Simulate API latency
    return random.randint(5, 15)  # Mock number of pages retrieved
85
+
86
st.title("Search Engine Efficiency Test")

# Unique widget key avoids duplicate-widget-ID errors with the app's other text inputs.
query_3 = st.text_input("Enter your query for Efficiency Test:", key="query_3")
if query_3:
    from concurrent.futures import ThreadPoolExecutor  # local import: only used by this demo

    st.subheader("Efficiency Comparison")

    # Parallel search: fan the three mock API calls out over a thread pool so
    # their simulated latencies overlap.  (The original ran them in a plain
    # list comprehension — sequentially — so the "parallel" timing was
    # indistinguishable from the sequential one.)
    st.write("Executing parallel search...")
    start_time = time.time()
    with ThreadPoolExecutor(max_workers=3) as pool:
        pages_retrieved = list(pool.map(mock_parallel_search, [query_3] * 3))
    parallel_time = time.time() - start_time

    # Sequential search: the same three calls, one after another.
    st.write("Executing sequential search...")
    start_time = time.time()
    sequential_pages_retrieved = sum(mock_parallel_search(query_3) for _ in range(3))
    sequential_time = time.time() - start_time

    # Display metrics
    st.write(f"**Parallel Search:** {sum(pages_retrieved)} pages retrieved in {parallel_time:.2f} seconds.")
    st.write(f"**Sequential Search:** {sequential_pages_retrieved} pages retrieved in {sequential_time:.2f} seconds.")
108
+
109
+
110
+ # 4 Integration and optimization (整合与优化)
111
+ # def main():
112
+ # st.sidebar.title("MindSearch Demos")
113
+ # demo = st.sidebar.radio("Select Demo", ["Query Decomposition", "Result Summarization", "Efficiency Test"])
114
+
115
+ # if demo == "Query Decomposition":
116
+ # # 使用唯一的 key
117
+ # query_1 = st.text_input("Enter your query for Query Decomposition:", key="query_1_decomposition")
118
+ # if query_1:
119
+ # st.subheader("Query Decomposition")
120
+ # dag, explanations = decompose_query(query_1)
121
+ # plt.figure(figsize=(10, 6))
122
+ # pos = nx.spring_layout(dag)
123
+ # nx.draw(dag, pos, with_labels=True, node_color="lightblue", node_size=3000, font_size=10, font_weight="bold")
124
+ # st.pyplot(plt)
125
+ # st.subheader("Explanations for Sub-queries")
126
+ # for node, explanation in explanations.items():
127
+ # st.write(f"**{node}:** {explanation}")
128
+
129
+ # elif demo == "Result Summarization":
130
+ # # 使用唯一的 key
131
+ # query_2 = st.text_input("Enter your query for Result Summarization:", key="query_2_summarization")
132
+ # if query_2:
133
+ # st.subheader("Fetching Results...")
134
+ # bing_results = fetch_results(query_2, "Bing")
135
+ # google_results = fetch_results(query_2, "Google")
136
+ # st.subheader("Results Comparison")
137
+ # col1, col2 = st.columns(2)
138
+ # with col1:
139
+ # st.write("**Bing Results:**")
140
+ # for result in bing_results:
141
+ # st.write(f"- {result['title']}: {result['snippet']}")
142
+ # st.write("**Summary:**", summarize_results(bing_results))
143
+ # with col2:
144
+ # st.write("**Google Results:**")
145
+ # for result in google_results:
146
+ # st.write(f"- {result['title']}: {result['snippet']}")
147
+ # st.write("**Summary:**", summarize_results(google_results))
148
+
149
+ # elif demo == "Efficiency Test":
150
+ # # 使用唯一的 key
151
+ # query_3 = st.text_input("Enter your query for Efficiency Test:", key="query_3_efficiency")
152
+ # if query_3:
153
+ # st.subheader("Efficiency Comparison")
154
+ # start_time = time.time()
155
+ # st.write("Executing parallel search...")
156
+ # pages_retrieved = [mock_parallel_search(query_3) for _ in range(3)]
157
+ # parallel_time = time.time() - start_time
158
+ # st.write("Executing sequential search...")
159
+ # start_time = time.time()
160
+ # sequential_pages_retrieved = sum([mock_parallel_search(query_3) for _ in range(3)])
161
+ # sequential_time = time.time() - start_time
162
+ # st.write(f"**Parallel Search:** {sum(pages_retrieved)} pages retrieved in {parallel_time:.2f} seconds.")
163
+ # st.write(f"**Sequential Search:** {sequential_pages_retrieved} pages retrieved in {sequential_time:.2f} seconds.")
164
+
165
+
166
+ # if __name__ == "__main__":
167
+ # main()