tracyshen301 committed
Commit: ca1a2dd
Parent: 48552a5
Configure Git LFS for image assets and prepare for push
- .gitattributes +2 -0
- .gitignore +56 -0
- README.md +196 -14
- app.py +282 -60
- assets/Analyzer_Output.jpg +3 -0
- assets/ImageFetch_Output.jpg +3 -0
- assets/Planner_Output1.jpg +3 -0
- assets/Planner_Output2.jpg +3 -0
- assets/Planner_Output3.jpg +3 -0
- assets/Planner_Output_Group_Iceland1.jpg +3 -0
- assets/Planner_Output_Group_Iceland2.jpg +3 -0
- assets/Summarize_Output.jpg +3 -0
- assets/bot.png +3 -0
- assets/travel_map_screenshot.jpg +3 -0
- assets/user.png +3 -0
- pyproject.toml +43 -0
- requirements.txt +0 -1
- server.py +46 -0
- test_personalized_planner.py +25 -0
- test_web_search.py +25 -0
- tools/enhanced_personalized_planner.py +290 -0
- tools/fetch_trips_by_location.py +54 -0
- tools/journal_creator.py +0 -0
- tools/personalized_planner.py +143 -0
- tools/travel_data_analyzer.py +187 -0
- tools/user_trip_summary.py +36 -0
- tools/web_search.py +100 -0
- travel_data_index/default__vector_store.json +0 -0
- travel_data_index/docstore.json +1 -0
- travel_data_index/graph_store.json +1 -0
- travel_data_index/image__vector_store.json +1 -0
- travel_data_index/index_store.json +1 -0
- trip_utils.py +34 -0
- uv.lock +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+assets/*.jpg filter=lfs diff=lfs merge=lfs -text
+assets/*.png filter=lfs diff=lfs merge=lfs -text
.gitignore
ADDED
@@ -0,0 +1,56 @@
__pycache__/
*.pyc
*.pyo
*.pyd
*build/
*dist/
*.egg-info/
*.egg
pip-wheel-metadata/
MANIFEST

# Virtual Environments
.venv/
venv/
env/
ENV/
# Actual secrets - NEVER commit this
.env
# But DO commit the example file
!.env.example

.vscode/
.idea/
*.project
*.pydevproject
*.kdev4
*.sublime-workspace
*.sublime-project

# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
desktop.ini

# Jupyter Notebook Checkpoints
.ipynb_checkpoints

# Log files
*.log
logs/

# Coverage reports
.coverage
.coverage.*
htmlcov/
coverage.xml

# Temporary files
*.tmp
*.bak
*.swp
*~
README.md
CHANGED
@@ -1,14 +1,196 @@
# ✈️🌍 PersonaTrip Agent - My Personalized Travel Planning & Community Insights AI Assistant! 🚀

[License: MIT](https://opensource.org/licenses/MIT)
[Hackathon](https://github.com/YOUR_HACKATHON_LINK)
[Claude 3.5 Sonnet](https://www.anthropic.com/news/claude-3-5-sonnet)
[LlamaIndex](https://www.llamaindex.ai/)
[Gradio](https://www.gradio.app/)
[Cloudinary](https://cloudinary.com/)
[Hugging Face](https://huggingface.co/)
[BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5)

> "The world is a book, and those who do not travel read only one page." — Augustine 📖

I have always believed in the value of travel, and I am committed to making every journey personal and full of surprises.
In Finland's long, cold, and often lonely winters, sharing travel stories within a community brings companionship and warmth, connecting people's memories and explorations.

Last week I launched my **[Travel Map Community Website](https://www.tripersona.online)**, where community members share their travel footprints. Now I am introducing **PersonaTrip Agent**: an intelligent travel assistant that not only plans personalized itineraries but also draws travel-trend insights from community data, bringing a new level of intelligence and personalization to the Travel Map community! ✨

PersonaTrip Agent combines a powerful large language model (**Anthropic Claude 3.5 Sonnet**), the **LlamaIndex** retrieval-augmented generation (**RAG**) framework, a **Gradio** interface, the **Cloudinary** image service, and **Hugging Face**'s **BAAI/bge-small-en-v1.5** embedding model with a toolset driven by the Model Context Protocol (**MCP**). The result is a system that understands user intent through both text and images and generates intelligent travel suggestions and insights from the Travel Map database.

## 🌟 Project Highlights

- **Community Data Empowerment** 🔗: PersonaTrip Agent connects directly to my Travel Map website database (MongoDB); every travel suggestion and insight is grounded in real user travel records, ensuring high credibility and relevance.

- **Data Value Reactivation** 💎: Static travel records become interactive intelligent services and knowledge, giving community data new life.

- **Personalized Itinerary Planning (LlamaIndex RAG)** 💖: `personalized_travel_planner` uses **LlamaIndex RAG** to retrieve a user's historical travel content and preferences and combines them with the LLM to generate truly "made for you" itineraries.

- **Community Trend Insights (LlamaIndex RAG)** 📊: `travel_data_analyzer` analyzes data across the whole community, answering statistical questions like "Who is the most active traveler?" or "What are the most popular destinations?" and providing data to support content recommendations and community interaction.

- **Multimodal Intelligent Interaction** 📸🖼️🧠:

  - **Landmark recognition from images**: When a user uploads a travel photo, the Agent uses **Claude 3.5 Sonnet**'s visual understanding to identify the location shown.
  - **Image-driven travel record retrieval**: Based on the recognized location, the Agent automatically calls `fetch_trips_by_location` to retrieve matching travel records from the Travel Map community.
  - **Visual preferences folded into personalized itineraries**: `enhanced_personalized_travel_planner` analyzes the descriptions and scenes of user-uploaded images, extracts visual preferences, and integrates them into future itinerary recommendations for a deeper sense of "being understood".

- **Model Context Protocol (MCP) Driven** 🛠️:

  - `enhanced_personalized_travel_planner`: the core RAG planning tool. It analyzes the historical travel data and preferences of one or more Travel Map users and, combined with the current request and optional real-time web intelligence, generates a highly customized travel plan for individuals or groups.
  - `fetch_trips_by_location`: quickly retrieves community travel records by location name.
  - `summarize_trip`: summarizes a user's past trips to help with recall and sharing.
  - `travel_data_analyzer`: builds a community-level index and performs statistical analysis, supporting cross-user queries.
  - `web_search`: calls the Tavily API for real-time information (weather, strikes, ticket availability, etc.) to support dynamic planning.

- **Future Integration into the Travel Map Website** 🧩: this Hackathon's Gradio prototype will later be integrated directly into my Travel Map website as a native chat assistant for community users.

## 🗺️ Travel Map Collaborative Experience

All data used by PersonaTrip Agent comes from my online Travel Map website.

![Travel Map screenshot](assets/travel_map_screenshot.jpg)
_Note: a screenshot of my Travel Map website, showing pins and itinerary cards on the map._

## 🚀 Main Feature Demonstrations

### 1. Plan a Personalized New Journey (RAG + Optional Web Intelligence) 🗺️

Whether it is an in-depth plan for an individual or careful planning for a group, PersonaTrip Agent can handle it!

**Scenario 1: Planning for an Individual**

User input: "Hi, I'm Tracy Shen, planning a 4-day trip to Rome in early September, flying Finnair. I'm passionate about ancient history and authentic local food and prefer a moderately paced itinerary with some free time."
The Agent first calls `web_search` for real-time information about Rome in early September and Finnair, then passes that intelligence together with the user request to `enhanced_personalized_travel_planner`. That tool internally uses **LlamaIndex RAG** to analyze Tracy Shen's historical preferences and generates a customized itinerary combining all of this information (see the sketch after the screenshots below).

![Planner output, part 1](assets/Planner_Output1.jpg)

![Planner output, part 2](assets/Planner_Output2.jpg)

![Planner output, part 3](assets/Planner_Output3.jpg)

+
**Scenario 2: Planning for Multi-person Team**
|
72 |
+
|
73 |
+
User input: "Hello, please plan a 5-day trip to Iceland in September for Tracy Shen, Liu Shuhui, and Jialong Xu."
|
74 |
+
Agent will also first call `web_search` to get real-time information about Iceland in September (weather, film festivals, safety advice, etc.). Then, it passes this information along with the team's joint request to `enhanced_personalized_travel_planner`. This tool will try to separately obtain (if available in database) historical preferences of Tracy Shen, Liu Shuhui, and Jialong Xu through **LlamaIndex RAG**, and strive to generate an Iceland itinerary that can balance diverse team interests and incorporate real-time intelligence.
|
75 |
+
|
76 |
+

|
77 |
+

|
78 |
+
|
79 |
+
### 2. Image Recognition ➡️ Travel Records 🖼️➡️📍
|
80 |
+
|
81 |
+
User uploads a Helsinki photo.
|
82 |
+
Agent (Claude Vision) recognizes it, then prompts and can call `fetch_trips_by_location` to find relevant community records from Travel Map database.
|
83 |
+
|
84 |
+

|
85 |
+
|
86 |
+
### 3. Community Travel Trend Insights (RAG) 📈❓
|
87 |
+
|
88 |
+
User: "Who is the most active global traveler in our community?" or "What are the most popular attractions everyone visits in Paris?"
|
89 |
+
Agent calls `travel_data_analyzer`, which uses **LlamaIndex** indexed complete community data, combined with LLM analysis to provide answers.
|
90 |
+
|
91 |
+

|
92 |
+
|
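For illustration, the analyzer boils down to a community-wide RAG query. A minimal sketch with LlamaIndex is shown below; the record fields are assumptions based on this repo's data, and the real implementation lives in `tools/travel_data_analyzer.py`:

```python
# Hedged sketch of a community-level RAG query (field names are illustrative;
# see tools/travel_data_analyzer.py for the actual implementation).
from llama_index.core import Document, Settings, VectorStoreIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.anthropic import Anthropic

Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
Settings.llm = Anthropic(model="claude-3-5-sonnet-20240620")


def answer_community_question(records: list[dict], question: str) -> str:
    # One Document per travel record, flattened into searchable text.
    docs = [
        Document(
            text=f"{r.get('name')} visited {r.get('destinationName')}: "
                 f"{r.get('highlights', '')}"
        )
        for r in records
    ]
    index = VectorStoreIndex.from_documents(docs)
    # The query engine retrieves the most relevant records and asks the LLM
    # to synthesize an answer over them.
    return str(index.as_query_engine(similarity_top_k=5).query(question))
```
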
### 4. Review Specific Travel Memories 🔍

User: "Summarize my last trip to Helsinki."
The Agent calls `summarize_trip` (LLM-based) to provide a summary.

![Summarize output](assets/Summarize_Output.jpg)

## 🛠️ Tech Stack

- **Data Source**: **MongoDB** (my Travel Map website database)
- **Large Language Model (LLM)**: **Anthropic Claude 3.5 Sonnet** (core conversation logic, text generation, and multimodal visual analysis / image understanding)
- **Retrieval-Augmented Generation (RAG)**: **LlamaIndex**
- **Embedding Model**: Hugging Face **BAAI/bge-small-en-v1.5** (used through LlamaIndex)
- **Tool Execution**: **MCP** (Model Context Protocol)
- **Core Tools**:
  - `enhanced_personalized_travel_planner` (Python, LlamaIndex RAG, MongoDB, Claude)
  - `fetch_trips_by_location` (Python, MongoDB query)
  - `summarize_trip` (Python, Claude for summarization)
  - `travel_data_analyzer` (Python, LlamaIndex RAG, MongoDB, Claude)
  - `web_search` (Python, **Tavily Search API**)
- **Image Processing and Storage**: **Cloudinary** (image upload and URL management), Base64 (API transmission)
- **Backend**: **Python**, Anthropic SDK, MCP SDK, **Pymongo**
- **Frontend and Interaction**: **Gradio**

## ⚙️ Local Setup Guide

1. **Clone my repository**:
   ```bash
   git clone [My repository HTTPS or SSH link]
   cd [My project directory name]
   ```
2. **Create and activate a virtual environment**:
   ```bash
   python -m venv .venv
   source .venv/bin/activate  # Linux/macOS
   # .venv\Scripts\activate   # Windows
   ```
3. **Install dependencies**:
   This project defines its dependencies in `pyproject.toml` and recommends `uv`, a fast Python package installer. If you do not have `uv` yet, install it first (typically `pip install uv`, or see its official documentation).

   With the virtual environment activated, install the dependencies:

   ```bash
   uv sync
   ```

   This reads `pyproject.toml` and uses `uv.lock` to install all required packages into your virtual environment quickly and reproducibly.

4. **Configure the `.env` file**:
   Copy `.env.example` (if provided) to `.env` and fill in all necessary API keys and connection strings:
   ```env
   ANTHROPIC_API_KEY="sk-ant-..."
   TAVILY_API_KEY="tvly-..."        # If using web_search
   CLOUDINARY_CLOUD_NAME="..."
   CLOUDINARY_API_KEY="..."
   CLOUDINARY_API_SECRET="..."
   MONGODB_URI="mongodb+srv://..."  # My Travel Map database connection string
   ```
5. **Build the LlamaIndex index (first run or after data updates)**:
   `tools/travel_data_analyzer.py` and `tools/enhanced_personalized_planner.py` contain the `initialize_analyzer_tool()` / RAG initialization logic, which builds the index on first use and persists it to the `./travel_data_index` directory. Make sure this directory is writable (see the sketch after this guide).

6. **Start the Gradio app and connect to the MCP server**:
   ```bash
   python app.py
   ```
   Open the provided local URL in a browser, click Connect in the UI to connect to the MCP server, and start exploring!

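Referring back to step 5: a minimal sketch of building the RAG index once, persisting it to `./travel_data_index`, and reloading it on later runs. The document list here is a placeholder; the real initialization lives in the tools named above.

```python
# Hedged sketch: persist the index on first run, reload it afterwards.
# Document construction is a placeholder for the repo's travel-record indexing.
import os

from llama_index.core import (
    Document,
    Settings,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
INDEX_DIR = "./travel_data_index"


def get_index(documents: list[Document]) -> VectorStoreIndex:
    if os.path.isdir(INDEX_DIR) and os.listdir(INDEX_DIR):
        # Reuse the persisted index instead of re-embedding everything.
        storage = StorageContext.from_defaults(persist_dir=INDEX_DIR)
        return load_index_from_storage(storage)
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=INDEX_DIR)  # writes docstore.json, index_store.json, ...
    return index
```
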
## 🔮 Future Plans: Building a Vibrant and Intelligent Travel Sharing and Planning Ecosystem! 🚀

My goal is not just to build an intelligent assistant but to integrate PersonaTrip Agent deeply into my **Travel Map community website**, ultimately creating a unique interactive platform for travel sharing and intelligent planning that makes every journey more exciting and every shared story more valuable!

- [x] **Seamless Agent Integration into the Travel Map Website (Core Goal, in Progress)** 🖼️➡️💻:
  Integrate PersonaTrip Agent directly into the Travel Map website as an intelligent chat assistant, so users can get personalized suggestions, look up information, and plan new itineraries while browsing the map or other members' itineraries.

- [ ] **Intelligent Travel Inspiration Engine (Based on the Map and Community Popularity)** 🧭✨:

  - When users explore an area on Travel Map, the Agent can proactively analyze popular itineraries, highly rated food, and hidden attractions shared by community members there, combine them with the user's personal preferences, and recommend relevant travel inspiration and itinerary segments.
  - For example: "Hey, I noticed you're looking at Kyoto's Gion district. Several friends in our community who, like you, enjoy deep cultural experiences have shared great tea ceremony experiences and kimono walking routes. Want to see them?"

- [ ] **One-click Generation and Sharing of Beautiful Travel Journals/Travelogues (PDF/Web)** 📔✍️:

  - After generating an itinerary with `enhanced_personalized_travel_planner`, or reviewing past trips via `fetch_trips_by_location` / `summarize_trip`, users can **export this information (itineraries, image descriptions, personal notes, etc.) with one click into beautifully designed travel journals or shareable web travelogues**.
  - The Agent can help organize and polish this content, making sharing easier and more attractive.

- [ ] **Travel Map Community Interaction Upgrade (an Instagram-like experience focused on travel)** ❤️💬🔖:

  - **Like, save, comment**: Users can like, save (for future inspiration), and comment on "travel record pins" or shared Agent-generated "itinerary plans" from themselves or other members on Travel Map, creating active community interaction.
  - **Follow and updates**: Users can follow travelers they like and view their latest travel shares and itinerary plans.
  - **"Travel personality" badges**: Based on the Agent's analysis of a user's travel preferences, users can display unique "travel personality" badges or tags on their Travel Map profile, adding a social twist.

- [ ] **Agent Intelligence Evolution Based on Community Feedback** 👍👎🧠:

  - Let users rate Agent-generated plans and insights (for example, with a simple like/dislike or brief written feedback).
  - This feedback will be used to continuously refine the **LlamaIndex RAG retrieval strategy and the LLM prompt engineering**, as well as the effectiveness of core tools like `enhanced_personalized_travel_planner` and `travel_data_analyzer`, so the Agent understands community users better over time.

- [ ] **Deeper Community Data Insights and Personalized Recommendations** 📊🎯:

  - The `travel_data_analyzer` tool will be further enhanced to answer more complex community trend questions, such as: "What are the most popular hidden European destinations for families with children in our community?" or "What types of restaurants are users who like visiting museums usually interested in?"
  - Based on these insights, the Agent can make more precise personalized recommendations, such as proactively suggesting popular community itineraries or practical tips when a user plans a specific type of trip.

- [ ] **Collaborative Travel Planning** 🤝🗺️:
  - Let multiple Travel Map community users jointly edit and refine a travel plan through PersonaTrip Agent, making it easier for friends, family, or teams to travel together.

Through the deep integration of PersonaTrip Agent and the Travel Map website, my vision is a vertical travel community that inspires travel, plans itineraries efficiently, and lets every precious experience be shared and connected. There is not only intelligence here, but also warmth and real human connection!
app.py
CHANGED
@@ -1,64 +1,286 @@
Removed (the previous app.py was the stock Gradio ChatInterface demo):

import gradio as gr
from huggingface_hub import InferenceClient
# ... (the InferenceClient setup and the `def respond(` signature are collapsed in the diff view) ...
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]

    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    messages.append({"role": "user", "content": message})

    response = ""

    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = message.choices[0].delta.content

        response += token
        yield response


"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)
Added (the new MCP-driven client application):

import asyncio
import os
import json
from typing import List, Dict, Any
import gradio as gr
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from anthropic import Anthropic, APIError
from dotenv import load_dotenv
import cloudinary
import cloudinary.uploader
from contextlib import AsyncExitStack
import requests
import base64

load_dotenv()

cloudinary.config(cloud_name=os.getenv("CLOUDINARY_CLOUD_NAME"), api_key=os.getenv("CLOUDINARY_API_KEY"), api_secret=os.getenv("CLOUDINARY_API_SECRET"), secure=True)
try:
    loop = asyncio.get_running_loop()
except RuntimeError:
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

def get_image_media_type_and_data(image_url: str):
    try:
        response = requests.get(image_url, timeout=10)
        response.raise_for_status()
        content_type = response.headers.get('Content-Type', 'image/jpeg')
        image_data = base64.b64encode(response.content).decode('utf-8')
        return content_type, image_data
    except requests.exceptions.RequestException as e:
        print(f"Error fetching image: {e}")
        return None, None

class MCPClientWrapper:
    def __init__(self):
        self.session: ClientSession | None = None
        self.exit_stack: AsyncExitStack | None = None
        self.tools: List[Dict[str, Any]] = []
        self.anthropic = Anthropic()

    def connect(self, server_path: str) -> str:
        return loop.run_until_complete(self._connect(server_path))

    async def _connect(self, server_path: str) -> str:
        if self.exit_stack: await self.exit_stack.aclose()
        self.exit_stack = AsyncExitStack()
        try:
            server_params = StdioServerParameters(command="python3", args=[server_path], env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"})
            stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
            self.stdio, self.write = stdio_transport
            self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
            await self.session.initialize()
            response = await self.session.list_tools()
            self.tools = [{"name": tool.name, "description": tool.description, "input_schema": tool.inputSchema} for tool in response.tools]
            print(f"DEBUG: Tools reported by MCP server: {[t['name'] for t in self.tools]}")
            return f"✅ Connected. Available tools: {', '.join([t['name'] for t in self.tools])}"
        except Exception as e:
            print(f"Failed to connect: {e}")
            self.session = None
            if self.exit_stack: await self.exit_stack.aclose(); self.exit_stack = None
            return f"❌ Connection Failed: {e}"

    def process_message(self, multi_modal_data: Dict[str, Any], history: List[Dict[str, Any]]) -> tuple:
        user_turn_content_for_gradio_display_parts = []
        cloudinary_image_url_for_api = None

        user_text_input = multi_modal_data.get("text", "")
        user_files_input = multi_modal_data.get("files")

        if not self.session:
            updated_history = list(history)
            temp_display_parts_for_no_session = []
            if user_text_input: temp_display_parts_for_no_session.append(user_text_input)
            if user_files_input: temp_display_parts_for_no_session.append("[Image Uploaded - Connection Error]")
            user_final_content_for_history = "\n".join(temp_display_parts_for_no_session) if temp_display_parts_for_no_session else "[Empty Input]"
            if user_final_content_for_history != "[Empty Input]":
                updated_history.append({"role": "user", "content": user_final_content_for_history})
            updated_history.append({"role": "assistant", "content": "⚠️ Please connect to an MCP server first."})
            return updated_history, gr.MultimodalTextbox(value=None)

        if user_text_input: user_turn_content_for_gradio_display_parts.append(user_text_input)
        if user_files_input and len(user_files_input) > 0:
            try:
                print(f"DEBUG: Uploading file {user_files_input[0]} to Cloudinary for display and API.")
                upload_result = cloudinary.uploader.upload(user_files_input[0], width=300, height=300, crop="limit", secure=True)
                cloudinary_image_url_for_api = upload_result.get("secure_url")
                if cloudinary_image_url_for_api:
                    # NOTE: the markdown image reference below was stripped by the diff view;
                    # it displays the uploaded photo inline in the Gradio chat history.
                    user_turn_content_for_gradio_display_parts.append(f"![Uploaded Image]({cloudinary_image_url_for_api})")
                else: user_turn_content_for_gradio_display_parts.append("[Image Uploaded (URL retrieval failed)]")
            except Exception as e:
                print(f"Error uploading image to Cloudinary: {e}")
                user_turn_content_for_gradio_display_parts.append(f"[Image Upload Failed: {e}]")

        user_content_for_gradio_history_turn = "\n".join(user_turn_content_for_gradio_display_parts)
        if not user_content_for_gradio_history_turn.strip() and not (user_files_input and cloudinary_image_url_for_api):
            user_content_for_gradio_history_turn = "[Empty Input]"

        history_for_llm_processing = list(history)
        if user_content_for_gradio_history_turn != "[Empty Input]":
            history_for_llm_processing.append({"role": "user", "content": user_content_for_gradio_history_turn})

        assistant_response_text = loop.run_until_complete(
            self._process_query(multi_modal_data, history_for_llm_processing, cloudinary_image_url_for_api)
        )

        final_gradio_history = list(history)
        if user_content_for_gradio_history_turn != "[Empty Input]":
            final_gradio_history.append({"role": "user", "content": user_content_for_gradio_history_turn})
        if assistant_response_text:
            final_gradio_history.append({"role": "assistant", "content": assistant_response_text})

        return final_gradio_history, gr.MultimodalTextbox(value=None)

    async def _process_query(
        self,
        multi_modal_data_from_user: Dict[str, Any],
        current_complete_history_for_llm: List[Dict[str, Any]],
        uploaded_image_url_for_api: str | None = None
    ) -> str:
        system_prompt = """
You are an intelligent travel assistant. Your goal is to help users by using the available tools: `summarize_trip`, `web_search`, `enhanced_personalized_travel_planner`, `travel_data_analyzer` and `fetch_trips_by_location`.

- If the user asks to **plan a NEW trip** (e.g., "Plan a trip to Paris for Tracy", "Plan a trip to Paris for Tracy for 5 days in early August. The Louvre and a specific French restaurant are must-sees, and I'd like to see an opera at night. We'll be flying Finnair."):
1. First, consider if current, real-time information (from `web_search`) could enhance the plan or is necessary for its viability (e.g., for specific dates, named entities like attractions, airlines, or if user implies need for current status).
2. If you decide to use `web_search`, call it first. Collect its output as `web_intelligence`.
3. Then, you MUST call the `enhanced_personalized_travel_planner` tool. Provide it with all standard planning details (user_name, destination, duration, user_request with all specifics like Louvre, Finnair, opera, restaurants) AND the `web_intelligence` (if gathered, otherwise pass an empty string or null equivalent).
4. CRITICAL: The raw text output from `enhanced_personalized_travel_planner` tool IS THE FINAL AND ONLY RESPONSE to the user for this planning request. Do NOT add any of your own text before or after it. Do NOT summarize it. Do NOT rephrase its opening (e.g., if planner says "Great news...", you do not say that again).

- If the user uploads an image:
1. Analyze the image to identify the location. You MUST state the identified location first (e.g., "This image appears to be of Helsinki...").
2. If the query asks about past trips for this location, use `fetch_trips_by_location`. After receiving the tool's output, summarize its key findings for the user. Do not show the raw tool output.
3. If the image is accompanied by a request to plan a new trip, follow the "plan a NEW trip" logic above.

- If the user asks to **summarize or recall a past trip**, use `summarize_trip`. Summarize its output.

- If a user asks for **trips to a specific location** without an image, use `fetch_trips_by_location`. Summarize its output.

- If a user asks general questions requiring analysis across many travel records, like "Who is the most active traveler?", "Who went farthest from Paris?", "What are common trip themes to Italy?", or "Who has visited Paris the most times?", use the `travel_data_analyzer` tool.
Think step-by-step. For tools OTHER THAN `enhanced_personalized_travel_planner`, after they return, you should generate a user-friendly response based on their output.
If no tool is appropriate, respond conversationally.
"""

        messages_for_llm_api_call = []

        for i, msg_from_history in enumerate(current_complete_history_for_llm):
            role = msg_from_history.get("role")
            content_value_from_history = msg_from_history.get("content")

            if not role or content_value_from_history is None:
                print(f"WARNING: Skipping invalid message from history (index {i}): {msg_from_history}")
                continue

            if i == len(current_complete_history_for_llm) - 1 and role == "user":
                # This is the current user turn, reconstruct its 'content' for the API
                current_turn_api_user_content_blocks = []
                original_user_text = multi_modal_data_from_user.get("text", "")

                if uploaded_image_url_for_api:
                    try:
                        media_type, image_data = get_image_media_type_and_data(uploaded_image_url_for_api)
                        if image_data:
                            current_turn_api_user_content_blocks.append({"type": "image", "source": {"type": "base64", "media_type": media_type, "data": image_data}})
                    except Exception as e:
                        print(f"Error converting uploaded image URL to base64 for API: {e}")
                        return f"Error preparing image for assistant: {e}"

                text_for_api = original_user_text
                if not text_for_api and uploaded_image_url_for_api:
                    text_for_api = "I've uploaded an image. Please analyze it, identify the location if possible, state where it is and then take the most relevant travel-related action using your tools (`summarize_trip`, `enhanced_personalized_travel_planner`, `fetch_trips_by_location`). If no tool is relevant, describe the image and ask how you can help."

                if text_for_api:
                    current_turn_api_user_content_blocks.append({"type": "text", "text": text_for_api})
                elif not current_turn_api_user_content_blocks and uploaded_image_url_for_api:
                    current_turn_api_user_content_blocks.append({"type": "text", "text": " "})

                if current_turn_api_user_content_blocks:
                    messages_for_llm_api_call.append({"role": "user", "content": current_turn_api_user_content_blocks})
            else:
                api_content_blocks = []
                if isinstance(content_value_from_history, str):
                    # NOTE: the original code special-cased history strings that start with the
                    # "![Uploaded Image](...)" markdown added for Cloudinary uploads, but that
                    # branch is truncated in the diff view; the turn is forwarded here as text.
                    api_content_blocks.append({"type": "text", "text": content_value_from_history})
                elif isinstance(content_value_from_history, list):
                    api_content_blocks = content_value_from_history
                else:
                    api_content_blocks.append({"type": "text", "text": str(content_value_from_history)})

                if api_content_blocks:
                    messages_for_llm_api_call.append({"role": role, "content": api_content_blocks})

        MAX_TURNS = 3
        for i in range(MAX_TURNS):
            try:
                print(f"--- Agent thinking... (Turn {i+1}) ---")
                response = self.anthropic.messages.create(
                    model="claude-3-5-sonnet-20240620", max_tokens=4000, system=system_prompt,
                    messages=messages_for_llm_api_call, tools=self.tools
                )
            except APIError as e: return f"❌ API Error: {e.message}"
            except Exception as e: return f"❌ Error calling Claude API: {e}"

            if response.content:
                messages_for_llm_api_call.append({"role": "assistant", "content": response.content})
            else:
                return "❌ Claude API returned empty content."

            if response.stop_reason == "tool_use":
                tool_results_for_llm_next_turn = []
                assistant_response_for_this_gradio_turn = ""

                planner_output_final = None

                for tool_call in response.content:
                    if tool_call.type == 'tool_use':
                        tool_name, tool_input = tool_call.name, tool_call.input
                        claude_text_associated_with_this_tool_call_sequence = "\n".join([c.text for c in response.content if c.type == 'text']).strip()

                        print(f"--- Agent executing tool: {tool_name} with input: {tool_input} ---")

                        try:
                            tool_result_resp = await self.session.call_tool(tool_name, tool_input)
                            tool_output_text = tool_result_resp.content[0].text if tool_result_resp.content and tool_result_resp.content[0].text else json.dumps(tool_result_resp.content)

                            if tool_name == "enhanced_personalized_travel_planner":
                                print(f"DEBUG: enhanced_personalized_travel_planner output IS THE FINAL response. Discarding preceding Claude text.")
                                planner_output_final = tool_output_text
                                break
                            else:
                                if not assistant_response_for_this_gradio_turn and claude_text_associated_with_this_tool_call_sequence:
                                    assistant_response_for_this_gradio_turn += claude_text_associated_with_this_tool_call_sequence + "\n\n"
                                assistant_response_for_this_gradio_turn += f"🛠️ Using tool `{tool_name}`...\n"
                                print(f"DEBUG: Output from `{tool_name}` (raw for next LLM turn):\n{tool_output_text[:300]}...")
                                tool_results_for_llm_next_turn.append({"type": "tool_result", "tool_use_id": tool_call.id, "content": tool_output_text})

                        except Exception as e:
                            tool_error_msg = f"Error executing tool {tool_name}: {e}"; print(f"ERROR: {tool_error_msg}")
                            tool_results_for_llm_next_turn.append({"type": "tool_result", "tool_use_id": tool_call.id, "content": tool_error_msg, "is_error": True})
                            if not assistant_response_for_this_gradio_turn and claude_text_associated_with_this_tool_call_sequence:
                                assistant_response_for_this_gradio_turn += claude_text_associated_with_this_tool_call_sequence + "\n\n"
                            assistant_response_for_this_gradio_turn += f"⚠️ Error using `{tool_name}`: {tool_error_msg}\n"

                if planner_output_final is not None:
                    return planner_output_final

                if not tool_results_for_llm_next_turn:
                    return assistant_response_for_this_gradio_turn.strip() or "An issue occurred: no tool results were prepared."

                if tool_results_for_llm_next_turn:
                    messages_for_llm_api_call.append({"role": "user", "content": tool_results_for_llm_next_turn})
                if i == MAX_TURNS - 1:
                    return assistant_response_for_this_gradio_turn.strip() or "Reached max turns while processing non-planner tools."
            else:
                final_text_from_claude = "\n".join([c.text for c in response.content if c.type == 'text']).strip()
                return final_text_from_claude or "I've finished processing."

        return "I seem to be stuck after multiple turns. Could you please rephrase your request?"


def gradio_interface():
    client_wrapper = MCPClientWrapper()
    with gr.Blocks(theme=gr.themes.Soft(), title="Persona Trip Agent") as demo:
        gr.Markdown("## ✈️ Persona Trip Agent\nUpload an image of a past trip to summarize it, or ask me to plan a new one!")
        with gr.Row():
            with gr.Column(scale=3): server_path = gr.Textbox(label="Local MCP Server File", value="server.py")
            with gr.Column(scale=1): connect_btn = gr.Button("🔌 Connect to Server", variant="primary")
        status = gr.Textbox(label="Connection Status", interactive=False)
        chatbot = gr.Chatbot(label="Chat History", type="messages", height=600, show_copy_button=True, avatar_images=("assets/user.png", "assets/bot.png"))
        multimodal_input = gr.MultimodalTextbox(file_types=["image"], placeholder="e.g., 'What did Tracy do here?' (with image) or 'Plan a 5-day trip to Tokyo for Tracy'", show_label=False, interactive=True)
        clear_btn = gr.Button("🧹 Clear Chat")
        clear_btn.click(fn=lambda: ([], None), outputs=[chatbot, multimodal_input])
        connect_btn.click(fn=client_wrapper.connect, inputs=server_path, outputs=status)
        multimodal_input.submit(fn=client_wrapper.process_message, inputs=[multimodal_input, chatbot], outputs=[chatbot, multimodal_input])
    return demo

if __name__ == "__main__":
    if not os.getenv("ANTHROPIC_API_KEY"):
        print("🔴 WARNING: ANTHROPIC_API_KEY environment variable not set. Claude API calls will fail.")
    if not (os.getenv("CLOUDINARY_CLOUD_NAME") and os.getenv("CLOUDINARY_API_KEY") and os.getenv("CLOUDINARY_API_SECRET")):
        print("🟡 WARNING: Cloudinary environment variables not fully set. Image uploads might fail if used.")
    interface = gradio_interface()
    interface.launch(debug=True)
assets/Analyzer_Output.jpg
ADDED
assets/ImageFetch_Output.jpg
ADDED
assets/Planner_Output1.jpg
ADDED
assets/Planner_Output2.jpg
ADDED
assets/Planner_Output3.jpg
ADDED
assets/Planner_Output_Group_Iceland1.jpg
ADDED
assets/Planner_Output_Group_Iceland2.jpg
ADDED
assets/Summarize_Output.jpg
ADDED
assets/bot.png
ADDED
assets/travel_map_screenshot.jpg
ADDED
assets/user.png
ADDED
pyproject.toml
ADDED
@@ -0,0 +1,43 @@
[project]
name = "persona-mcp-space"
version = "0.1.0"
description = "Agentic MCP Space for personalized travel recommendations"
authors = [{ name = "Tracy Shen", email = "[email protected]" }]
readme = "README.md"
requires-python = ">=3.13"

dependencies = [
    "gradio>=4.29.0",
    "fastmcp>=0.1.1",
    "langgraph>=0.0.30",
    "llama-index>=0.10.36",
    "pillow",
    "openai",
    "tqdm",
    "requests",
    "python-dotenv",
    "gradio-client>=1.10.2",
    "mcp>=1.9.2",
    "agent>=0.1.3",
    "pymongo>=4.13.0",
    "cloudinary>=1.44.0",
    "langgraph-prebuilt>=0.2.2",
    "langchain-core>=0.3.63",
    "langgraph-checkpoint-sqlite>=2.0.10",
    "anthropic>=0.52.2",
    "qdrant-client>=1.14.2",
    "llama-index-vector-stores-qdrant>=0.6.0",
    "llama-index-readers-file>=0.4.8",
    "llama-index-embeddings-fastembed>=0.3.2",
    "llama-index-llms-openai>=0.4.3",
    "llama-index-embeddings-clip>=0.4.0",
    "torch>=2.7.1",
    "torchvision>=0.22.1",
    "llama-index-llms-anthropic>=0.7.2",
    "llama-index-embeddings-huggingface>=0.5.4",
    "sentence-transformers>=4.1.0",
    "tavily-python>=0.7.5",
]

[tool.ui]
mode = "app"
requirements.txt
DELETED
@@ -1 +0,0 @@
-huggingface_hub==0.25.2
server.py
ADDED
@@ -0,0 +1,46 @@
from mcp.server.fastmcp import FastMCP
import sys, io, os, asyncio
from tools.user_trip_summary import summarize_trip as db_summarize_trip
from tools.fetch_trips_by_location import fetch_trips_by_location as fetch_trips
from tools.web_search import get_travel_intelligence_briefing
from tools.enhanced_personalized_planner import enhanced_create_personalized_plan
from tools.travel_data_analyzer import analyze_travel_data_with_llm


sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')

mcp = FastMCP("persona-trip-mcp-server")

@mcp.tool()
async def web_search(destination: str, time_period: str, user_keywords: list[str] = None) -> str:
    """
    Gathers real-time travel intelligence for a specific destination and time period.
    Use this tool BEFORE planning to check for advisories, strikes, events, and user-specific keywords.
    """
    return await get_travel_intelligence_briefing(destination, time_period, user_keywords or [])

@mcp.tool()
async def travel_data_analyzer(user_question: str) -> str:
    """
    This tool is designed to understand and respond to queries that require aggregation, comparison, or statistical insights from all users' past travel data.
    """
    return await analyze_travel_data_with_llm(user_question)

@mcp.tool()
async def enhanced_personalized_travel_planner(user_name: str, new_destination: str, trip_duration_days: int, user_request: str, web_intelligence: str = None) -> str:
    """The main tool to generate a complete, personalized travel itinerary based on a user's travel history and latest web search result."""
    return await enhanced_create_personalized_plan(user_name, new_destination, trip_duration_days, user_request, web_intelligence)

@mcp.tool()
async def summarize_trip(person_name: str, city: str) -> str:
    """Summarizes the travel experience of a specific user in a specific city from the database."""
    return await asyncio.to_thread(db_summarize_trip, person_name, city)

@mcp.tool()
async def fetch_trips_by_location(city: str) -> str:
    """Retrieves all travel records related to a given city."""
    return await asyncio.to_thread(fetch_trips, city)

if __name__ == "__main__":
    mcp.run(transport='stdio')
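For a quick smoke test outside the Gradio app, a minimal client can spawn this server over stdio and list its tools, mirroring the connection logic in `app.py`:

```python
# Minimal stdio client for server.py, mirroring MCPClientWrapper._connect in app.py.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    params = StdioServerParameters(command="python3", args=["server.py"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print("Available tools:", [t.name for t in tools.tools])


if __name__ == "__main__":
    asyncio.run(main())
```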
test_personalized_planner.py
ADDED
@@ -0,0 +1,25 @@
import asyncio
import os
from dotenv import load_dotenv
from tools.personalized_planner import create_personalized_plan

load_dotenv()

TEST_USER_NAME = "Tracy"
TEST_NEW_DESTINATION = "Kyoto"
TEST_TRIP_DURATION_DAYS = 5
# English translation of the Chinese request below: "Next month, in August, I'll fly Finnair to Kyoto.
# I'd like a culturally immersive, food-focused itinerary, ideally with some relaxing moments too."
TEST_USER_REQUEST = "我会在下个月八月搭乘芬兰航空去京都,想要一个文化沉浸和美食体验的行程,最好能有一些放松的时刻。"

async def run_test():
    print("🚀 Running test for personalized travel plan...\n")
    result = await create_personalized_plan(
        user_name=TEST_USER_NAME,
        new_destination=TEST_NEW_DESTINATION,
        trip_duration_days=TEST_TRIP_DURATION_DAYS,
        user_request=TEST_USER_REQUEST
    )
    print("\n📒 Generated Plan:\n")
    print(result)

if __name__ == "__main__":
    asyncio.run(run_test())
test_web_search.py
ADDED
@@ -0,0 +1,25 @@
import asyncio
import os
from dotenv import load_dotenv
from tools.personalized_planner import create_personalized_plan

load_dotenv()

TEST_USER_NAME = "Tracy"
TEST_NEW_DESTINATION = "Kyoto"
TEST_TRIP_DURATION_DAYS = 5
TEST_USER_REQUEST = "I want a culturally immersive and food-focused experience, ideally with some relaxing moments too."

async def run_test():
    print("🚀 Running test for personalized travel plan...\n")
    result = await create_personalized_plan(
        user_name=TEST_USER_NAME,
        new_destination=TEST_NEW_DESTINATION,
        trip_duration_days=TEST_TRIP_DURATION_DAYS,
        user_request=TEST_USER_REQUEST
    )
    print("\n📒 Generated Plan:\n")
    print(result)

if __name__ == "__main__":
    asyncio.run(run_test())
tools/enhanced_personalized_planner.py
ADDED
@@ -0,0 +1,290 @@
import os
import asyncio
from llama_index.core import Document, VectorStoreIndex, Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from pymongo import MongoClient
from anthropic import AsyncAnthropic
import requests
import base64
from PIL import Image
from io import BytesIO
import traceback


try:
    Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
    Settings.chunk_size = 512
except Exception as e:
    print(f"Error initializing LlamaIndex settings in enhanced_planner_tool: {e}")

async_anthropic_client = AsyncAnthropic()

try:
    client = MongoClient(os.getenv("MONGODB_URI"))
    db = client.get_database()
    collection = db.get_collection("travelrecords")
except Exception as e:
    print(f"FATAL: Could not connect to MongoDB for enhanced_planner_tool. Error: {e}")
    collection = None

async def get_image_b64_data(image_url: str):
    def blocking_io_and_compress():
        try:
            response = requests.get(image_url, timeout=10)
            response.raise_for_status()
            content = response.content
            content_type_header = response.headers.get('Content-Type', 'image/jpeg')
            MAX_RAW_SIZE_MB = 4.5
            if len(content) <= MAX_RAW_SIZE_MB * 1024 * 1024:
                return content_type_header, base64.b64encode(content).decode('utf-8')
            print(f"⚠️ Image >{MAX_RAW_SIZE_MB}MB, attempting compression: {image_url}")
            img = Image.open(BytesIO(content))
            if img.mode == 'RGBA' or img.mode == 'LA' or (img.mode == 'P' and 'transparency' in img.info):
                img = img.convert('RGB')
            buffer = BytesIO()
            img.save(buffer, format="JPEG", quality=70, optimize=True)
            compressed_data = buffer.getvalue()
            MAX_COMPRESSED_SIZE_MB_FOR_API = 3.7
            if len(compressed_data) > MAX_COMPRESSED_SIZE_MB_FOR_API * 1024 * 1024:
                print(f"❌ Compression to JPEG quality 70 failed, still too large for {image_url}")
                return None, None
            return "image/jpeg", base64.b64encode(compressed_data).decode('utf-8')
        except Exception as e:  # Catch more specific exceptions if needed
            print(f"❌ Error processing image {image_url}: {e}")
            return None, None
    return await asyncio.to_thread(blocking_io_and_compress)

async def describe_image(image_data: str, media_type: str) -> str:
    if not image_data: return ""
    try:
        response = await async_anthropic_client.messages.create(
            model="claude-3-haiku-20240307", max_tokens=75,
            messages=[{"role": "user", "content": [
                {"type": "image", "source": {"type": "base64", "media_type": media_type, "data": image_data}},
                {"type": "text", "text": "Briefly describe this travel photo's key elements and atmosphere in one sentence. Focus on what it might reveal about the traveler's preferences if possible."}
            ]}]
        )
        return response.content[0].text.strip() if response.content and response.content[0].text else ""
    except Exception as e:
        print(f"ERROR calling vision model for image description: {e}")
        return ""

async def describe_all_images(image_urls: list) -> str:
    if not image_urls: return "No images provided for description."
    MAX_IMAGES_TO_DESCRIBE = 3
    processed_image_urls = image_urls[:MAX_IMAGES_TO_DESCRIBE]
    if len(image_urls) > MAX_IMAGES_TO_DESCRIBE:
        print(f"Warning: Describing first {MAX_IMAGES_TO_DESCRIBE} of {len(image_urls)} images.")
    get_data_tasks = [get_image_b64_data(url) for url in processed_image_urls]
    image_processing_results = await asyncio.gather(*get_data_tasks, return_exceptions=True)
    desc_tasks = []
    for i, result in enumerate(image_processing_results):
        if isinstance(result, Exception): continue
        media_type, img_data = result
        if img_data: desc_tasks.append(describe_image(img_data, media_type))
    if not desc_tasks: return "Could not process any images for description."
    descriptions = await asyncio.gather(*desc_tasks, return_exceptions=True)
    valid_descriptions = [desc for desc in descriptions if isinstance(desc, str) and desc]
    if not valid_descriptions: return "No valid image descriptions could be generated."
    return "\n".join(valid_descriptions)


async def enhanced_create_personalized_plan(
    user_name_input: str,
    new_destination: str,
    trip_duration_days: int,
    user_request: str,
    web_intelligence: str = None
) -> str:
    if collection is None:
        return "Tool Error: MongoDB connection is not available for enhanced_personalized_planner."

    print(f"--- [Enhanced Planner] Starting for User(s): '{user_name_input}' to {new_destination} ---")
    if web_intelligence:
        print(f"--- Incorporating Web Intelligence (first 100 chars): {web_intelligence[:100]}... ---")

    parsed_individual_names = [name.strip() for name in user_name_input.replace(" and ", ",").split(',') if name.strip()]
    if not parsed_individual_names:
        parsed_individual_names = [user_name_input.strip()]

    is_single_person_plan = len(parsed_individual_names) == 1 and len(user_name_input.split()) < 4
    print(f"DEBUG: Parsed names for RAG: {parsed_individual_names}, Is single person plan: {is_single_person_plan}")

    all_retrieved_persona_contexts = []
    found_records_for_any_user = False

    for name_to_query in parsed_individual_names:
        print(f"Processing records for: {name_to_query}")
        try:
            user_records_cursor = collection.find({"name": {"$regex": f"^{name_to_query}$", "$options": "i"}})
            user_records = await asyncio.to_thread(list, user_records_cursor)

            if user_records:
                found_records_for_any_user = True
                print(f"Found {len(user_records)} past trip records for {name_to_query}.")
                async def create_document_from_record(record):
                    uploaded_images = record.get('uploadedImages', [])
                    image_descriptions_summary = "No image information."
                    if isinstance(uploaded_images, list) and uploaded_images:
                        image_descriptions_summary = await describe_all_images(uploaded_images)
                    text_content_parts = [
                        f"Trip to {record.get('destinationName', 'N/A')} by {name_to_query}",
                        f"Highlights: {record.get('highlights', 'N/A')}",
                        f"Memorable Food: {record.get('memorableFood', 'N/A')}",
                        f"Deepest Impression Spot: {record.get('deepestImpressionSpot', 'N/A')}",
                        f"Image Summary: {image_descriptions_summary}"
                    ]
                    text_content = "\n".join(filter(None, text_content_parts))
                    return Document(text=text_content, metadata={"source_traveler": name_to_query})

                documents_for_rag = await asyncio.gather(*[create_document_from_record(r) for r in user_records])
                if documents_for_rag:
                    print(f"Created {len(documents_for_rag)} documents for {name_to_query}.")
                    def build_and_retrieve_for_single_user(docs):
                        index = VectorStoreIndex.from_documents(docs, show_progress=False)
                        retriever = index.as_retriever(similarity_top_k=3)
                        nodes = retriever.retrieve(f"Key travel style, preferences, likes, and dislikes of {name_to_query} based on their trips.")
                        return "\n\n---\n".join([node.get_content() for node in nodes])
                    persona_context_for_user = await asyncio.to_thread(build_and_retrieve_for_single_user, documents_for_rag)
                    if persona_context_for_user.strip():
                        all_retrieved_persona_contexts.append(f"Context for {name_to_query}:\n{persona_context_for_user}")
            else:
                print(f"No past travel records found for {name_to_query} in the database.")
        except Exception as e:
            print(f"Error processing records for {name_to_query}: {e}\n{traceback.format_exc()}")

    if not found_records_for_any_user or not all_retrieved_persona_contexts:
        retrieved_context_for_llm = "No specific past travel records found for the mentioned traveler(s) to infer detailed personas from the database."
    else:
        retrieved_context_for_llm = "\n\n===\n\n".join(all_retrieved_persona_contexts)

    print(f"\n--- Combined Retrieved Context for LLM (first 500 chars) ---\n{retrieved_context_for_llm[:500]}...\n-----------------------------------\n")

    system_prompt_for_final_llm = "You are an expert travel agent and creative itinerary crafter. Your task is to synthesize a user's historical travel persona, their current specific request, and any provided real-time travel intelligence into a compelling, personalized, day-by-day travel itinerary. The itinerary should be actionable, inspiring, and directly address all provided inputs."

    final_llm_prompt = f"""
**Objective:** Create a hyper-personalized travel itinerary, potentially for a group of travelers.

**User Profile & Request:**
* **Traveler(s) Name(s):** {user_name_input}
* **Desired Destination:** {new_destination}
* **Trip Length:** {trip_duration_days} days
* **User's Specific Request for this Trip:** "{user_request}"

**Combined Inferred Persona Context (from past travels of mentioned individuals, make it detailed if available):**
---
{retrieved_context_for_llm}
---
"""
    if web_intelligence:
        final_llm_prompt += f"""
**Current Travel Intelligence for {new_destination} (during the user's travel period):**
---
{web_intelligence}
---
**IMPORTANT INSTRUCTION FOR USING THE ABOVE INTELLIGENCE:**
This 'Current Travel Intelligence' is a critical input. It reflects a wide array of real-time information pertinent to the user's trip to {new_destination} during their specified travel window. This information may include, but is not limited to:
- Expected **weather** conditions and any related advisories.
- **Transportation** updates, including public transit status, road conditions, potential strikes affecting various modes of transport (e.g., Finnair, local trains/buses), or other transit disruptions.
- Information on **festivals, holidays, special celebrations, exhibitions, or unique local events** occurring.
|
190 |
+
- Potential opportunities for viewing **natural phenomena** (e.g., Northern Lights, seasonal blooms, wildlife migrations).
|
191 |
+
- Notices regarding **temporary closures, renovations, or changed operating hours** for attractions, restaurants, museums, or other points of interest.
|
192 |
+
- Official **travel warnings, safety advisories, or health recommendations**.
|
193 |
+
- Details on planned **demonstrations, protests, public gatherings, or other public order events** that might impact travel plans, access to areas, or safety.
|
194 |
+
|
195 |
+
You **MUST actively analyze ALL aspects of this intelligence** and skillfully weave relevant points into the daily itinerary, specific recommendations, general advice, and any contingency planning. Your goal is to make the generated travel plan as practical, safe, up-to-date, and enriching as possible by leveraging this information.
|
196 |
+
"""
|
197 |
+
else:
|
198 |
+
final_llm_prompt += f"""
|
199 |
+
**Current Travel Intelligence for {new_destination}:**
|
200 |
+
---
|
201 |
+
No specific real-time travel intelligence was provided for this planning request. The following itinerary is based on general knowledge and the user's preferences. It is highly recommended that the user checks current local conditions, event schedules, and advisories closer to their travel date.
|
202 |
+
---
|
203 |
+
"""
|
204 |
+
|
205 |
+
final_llm_prompt += """
|
206 |
+
**Your Generation Task:**
|
207 |
+
|
208 |
+
1. **Greeting and Persona/Group Summary (DIFFERENTIATED INSTRUCTIONS FOR SINGLE VS. GROUP):**
|
209 |
+
"""
|
210 |
+
if is_single_person_plan:
|
211 |
+
final_llm_prompt += f"""
|
212 |
+
Start by addressing the traveler: "Hello {parsed_individual_names[0]}!" (or use the full user_name_input if it's clearly a single person's full name).
|
213 |
+
Next, based on the 'Combined Inferred Persona Context' (which should primarily reflect this single user if their records were found), craft a **VERY DETAILED AND INSIGHTFUL summary of their travel personality. This summary MUST be comprehensive, aiming for AT LEAST 8 well-developed sentences, forming one or two rich paragraphs.**
|
214 |
+
Begin this detailed persona with: "Based on your past travel experiences, I've discovered you are a traveler who...".
|
215 |
+
You MUST elaborate on several key aspects, using specific examples from their past trips if the 'Combined Inferred Persona Context' allows:
|
216 |
+
* **Core Passions & Interests:** What truly drives their travels? (e.g., "a deep-seated fascination with ancient civilizations, evident from your exploration of [Specific Site Mentioned in RAG Context]", "an adventurous spirit for authentic culinary exploration, demonstrated by your quest for [Specific Food Type in RAG Context]").
|
217 |
+
* **Travel Style & Pace:** How do they prefer to experience a destination? (e.g., "favors a balanced itinerary that masterfully blends iconic landmark visits with the thrill of discovering off-the-beaten-path local gems", "seems to thrive on a moderately paced exploration, which allows for both meticulously planned activities and delightful spontaneous moments of discovery").
|
218 |
+
* **Decision Drivers & Values:** What underlying factors influence their travel choices? (e.g., "demonstrates a savvy and discerning approach, consistently seeking high-quality, memorable experiences while being adept at identifying and avoiding tourist traps or overpriced ventures", "shows a clear appreciation for value but is also willing to invest in truly unique and enriching opportunities like [Example from RAG Context if available]").
|
219 |
+
* **Noteworthy Preferences & Quirks (if evident):** Highlight any distinct likes (e.g., "a pronounced enjoyment of breathtaking natural vistas, perhaps similar to [View Mentioned in RAG Context], or dynamic cityscapes") or dislikes (e.g., "a noted aversion to overly crowded tourist spots or subpar service, as indicated by your feedback on [Negative Experience from RAG Context]").
|
220 |
+
* **Aesthetic Sensibilities & Curiosity:** What types of beauty or knowledge do they pursue? (e.g., "possesses a keen eye for architectural marvels, both ancient and contemporary", "driven by a curiosity for diverse cultures and historical narratives").
|
221 |
+
This detailed persona summary is CRUCIAL for making the user feel deeply understood before presenting the plan.
|
222 |
+
If the 'Combined Inferred Persona Context' is minimal or absent even for a single user (e.g., states 'No specific past travel records found...' or similar in the context), you should still attempt to create a thoughtful, **brief (2-4 sentences) introductory persona observation based on ANY clues available in the current 'User's Specific Request for this Trip'.** For example: "Hello {parsed_individual_names[0]}! While I'm still getting to know your detailed travel style from past trips, your current request for a trip to {new_destination} focusing on [mention key aspects from user_request, e.g., 'exploring historical sites'] suggests you have a keen interest in [inferred preference 1, e.g., 'delving into the past'] and perhaps [inferred preference 2, e.g., 'experiencing the local culture firsthand']. We can certainly build a wonderful trip around these themes!"
|
223 |
+
"""
|
224 |
+
else:
|
225 |
+
group_greeting_names = ", ".join(parsed_individual_names[:-1]) + f", and {parsed_individual_names[-1]}" if len(parsed_individual_names) > 1 else user_name_input
|
226 |
+
final_llm_prompt += f"""
|
227 |
+
Start by addressing all travelers: "Hello {group_greeting_names}!"
|
228 |
+
Based on the 'Combined Inferred Persona Context' (which may contain information for one, some, or all mentioned individuals, or be general if specific RAG per person was not fully successful), your task is to provide a **CONCISE persona summary for EACH traveler if distinct information is available from the context, or a brief collective summary if not.**
|
229 |
+
|
230 |
+
* **For EACH traveler in '{", ".join(parsed_individual_names)}' for whom the 'Combined Inferred Persona Context' provides distinct insights:** Provide a **brief (1-3 sentences) individual summary** of their apparent key travel interests or style as revealed in the context. For example:
|
231 |
+
* "From the provided context for Tracy, past trips suggest a strong interest in historical landmarks and authentic culinary experiences."
|
232 |
+
* "The context for Liu indicates a preference for outdoor adventures and scenic natural landscapes."
|
233 |
+
* **If distinct insights for each individual are scarce in the context OR if the context is more general for the group:** Provide a **brief (1-3 sentences) collective summary** that attempts to capture any apparent shared travel preferences OR highlights how the plan will aim to balance potentially diverse (even if not fully known) interests. For example: "For your group's adventure, the available insights suggest an appreciation for diverse cultural experiences and perhaps an enjoyment of good local food. This itinerary will aim to offer a variety of activities to cater to different tastes within the group."
|
234 |
+
The goal for a group is a concise understanding relevant to crafting a balanced group plan, NOT an exhaustive analysis of each individual unless very rich, distinct data for each is present AND easily summarizable in 1-3 sentences per person from the provided context.
|
235 |
+
If the 'Combined Inferred Persona Context' effectively states 'No specific past travel records found for any of the mentioned travelers...', then state that clearly: "Hello {group_greeting_names}! While I don't have specific past travel data for your group to draw detailed individual personas from, I'll craft a great itinerary based on your current request and general travel best practices for {new_destination}."
|
236 |
+
"""
|
237 |
+
|
238 |
+
final_llm_prompt += """
|
239 |
+
|
240 |
+
If absolutely NO clues are available (neither in historical context nor in current request to infer from for single or group), then default to a friendly, direct greeting like: "Great news, {user_name_input}! Let's get your trip to {new_destination} planned."
|
241 |
+
|
242 |
+
2. **Transition to Plan & Overall Trip Context (Incorporating Web Intelligence):**
|
243 |
+
Follow the persona summary/greeting.
|
244 |
+
**Before starting the day-by-day itinerary, synthesize any overarching key takeaways from the 'Current Travel Intelligence' (if provided) that provide essential overall context for the trip.**
|
245 |
+
This could be a brief summary of expected general conditions or major factors to keep in mind throughout the journey.
|
246 |
+
For example: "For your trip to {new_destination}, please be aware that [mention a key city-wide event or general advisory from web_intelligence, e.g., 'a major public transport strike is anticipated during your visit, so planning alternative travel will be crucial,' or 'the weather is expected to be exceptionally warm, so pack accordingly and stay hydrated.']. We'll factor this into the daily plans."
|
247 |
+
If no specific web_intelligence was provided, you can state: "Let's craft your itinerary for {new_destination} based on your preferences. I advise checking local conditions and event schedules closer to your travel dates for the latest information."
|
248 |
+
Then, transition to the plan: "With these considerations and your preferences in mind, here’s a tailored itinerary for your {trip_duration_days}-day adventure in {new_destination}:"
|
249 |
+
|
250 |
+
3. **Detailed Day-by-Day Itinerary (Infused with Web Intelligence & Persona):**
|
251 |
+
Provide a comprehensive plan. For each day or specific activity:
|
252 |
+
* Present the activity or place engagingly.
|
253 |
+
* Naturally weave in the REASON for this recommendation, **explicitly connecting it to the user's (or individual group member's, or the group's collective) inferred persona (from step 1), specific requests, mentioned interests, or relevant `Current Travel Intelligence`.** Make these connections clear and compelling.
|
254 |
+
* If `Current Travel Intelligence` was relevant, explicitly demonstrate how specific pieces of that intelligence influence the recommendations, timing, or advice for THIS day or activity.
|
255 |
+
For instance:
|
256 |
+
* If a festival is mentioned: "The [Festival Name] is taking place near your hotel today, offering a wonderful chance to experience local culture firsthand; I've left the afternoon flexible for you to explore it."
|
257 |
+
* If a museum has special hours: "We'll visit the [Museum Name] in the morning, as the travel intelligence indicates it has extended hours on [Day] but may be very busy later due to [Event]."
|
258 |
+
* Include practical tips.
|
259 |
+
|
260 |
+
4. **Concluding General Advice (Reinforcing Key Intelligence if needed):**
|
261 |
+
After the day-by-day plan:
|
262 |
+
* Briefly reiterate any **critical overarching advice from 'Current Travel Intelligence'** that the user absolutely must remember, especially if it impacts multiple days or general safety/logistics, and hasn't been fully emphasized enough within the daily plans.
|
263 |
+
* For example: "As a final reminder, please keep an eye on updates regarding the [previously mentioned strike/event] as your travel dates approach, and always have a backup for transportation."
|
264 |
+
* If all critical intelligence has been well-integrated daily, this section can be very brief or focus on general well-wishes. The primary goal is that no crucial piece of provided intelligence is overlooked by the user.
|
265 |
+
|
266 |
+
5. **Concluding Remarks (Optional):** End with a friendly closing.
|
267 |
+
|
268 |
+
|
269 |
+
**Output Style:**
|
270 |
+
- Be engaging, informative, and helpful.
|
271 |
+
- Use clear headings for days (e.g., "Day 1: Arrival and Ancient Wonders").
|
272 |
+
- Use bullet points for activities within each day.
|
273 |
+
- The language should generally match the user's request language (assume English if not specified, but be adaptable if the user's request or persona context clearly indicates another language preference for the output).
|
274 |
+
"""
|
275 |
+
|
276 |
+
print(f"--- Calling Final LLM for itinerary generation (persona detail adjusted: single_detailed={is_single_person_plan})... ---")
|
277 |
+
try:
|
278 |
+
response_message = await async_anthropic_client.messages.create(
|
279 |
+
model="claude-3-5-sonnet-20240620",
|
280 |
+
max_tokens=4096,
|
281 |
+
system=system_prompt_for_final_llm,
|
282 |
+
messages=[{"role":"user", "content":final_llm_prompt}]
|
283 |
+
)
|
284 |
+
generated_plan = response_message.content[0].text
|
285 |
+
print(f"--- Generated Plan (first 300 chars): {generated_plan[:300]}... ---")
|
286 |
+
return generated_plan
|
287 |
+
except Exception as e:
|
288 |
+
error_message = f"LLM Error in enhanced_planner: {type(e).__name__}: {str(e)}"
|
289 |
+
print(f"{error_message}\n{traceback.format_exc()}")
|
290 |
+
return error_message
|
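Illustrative usage sketch (hypothetical, not part of the committed files): assuming MONGODB_URI and ANTHROPIC_API_KEY are set and the tools directory is importable, the enhanced planner could be driven like this; the traveler names, destination, and request are placeholders.

# Hypothetical invocation of the enhanced planner; all argument values below are placeholders.
import asyncio
from tools.enhanced_personalized_planner import enhanced_create_personalized_plan

async def main():
    plan = await enhanced_create_personalized_plan(
        user_name_input="Tracy Shen and Hope",   # a single name or a comma/"and"-separated group
        new_destination="Reykjavik",
        trip_duration_days=5,
        user_request="Northern lights, hot springs, and local food",
        web_intelligence=None,                   # optionally, a briefing string from tools.web_search
    )
    print(plan)

asyncio.run(main())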
tools/fetch_trips_by_location.py
ADDED
@@ -0,0 +1,54 @@
import os
import json
from pymongo import MongoClient
from dotenv import load_dotenv
from trip_utils import format_trip_summary

load_dotenv()

client = MongoClient(os.getenv("MONGODB_URI"))
db = client.get_database()
collection = db.get_collection("travelrecords")


def fetch_trips_by_location(city: str) -> str:
    """
    Fetch all trip records to a given city across all users, using name fuzzy match
    AND exact latitude/longitude match to ensure consistent location.
    """
    reference = collection.find_one({
        "$or": [
            {"destinationName": {"$regex": city, "$options": "i"}},
            {"locationName": {"$regex": city, "$options": "i"}}
        ],
        "latitude": {"$exists": True},
        "longitude": {"$exists": True}
    })

    if not reference:
        return json.dumps({
            "type": "text",
            "message": f"No travel records found for {city}."
        })

    lat = reference.get("latitude")
    lon = reference.get("longitude")

    records = list(collection.find({
        "latitude": lat,
        "longitude": lon
    }))

    if not records:
        return json.dumps({
            "type": "text",
            "message": f"No travel records found for {city}."
        })

    summaries = [format_trip_summary(rec) for rec in records]
    result = "\n---\n".join(summaries)

    return json.dumps({
        "type": "text",
        "message": result
    })
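Illustrative usage sketch (hypothetical): assuming MONGODB_URI points at a database containing a travelrecords collection, the tool returns a JSON string with "type" and "message" keys; the city value is a placeholder.

# Hypothetical call to the location-based fetch tool.
import json
from tools.fetch_trips_by_location import fetch_trips_by_location

payload = json.loads(fetch_trips_by_location("Helsinki"))
print(payload["message"])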
tools/journal_creator.py
ADDED
File without changes
tools/personalized_planner.py
ADDED
@@ -0,0 +1,143 @@
import os
import asyncio
from llama_index.core import Document, VectorStoreIndex, Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from pymongo import MongoClient
from anthropic import AsyncAnthropic
import requests
import base64
from PIL import Image
from io import BytesIO

Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
Settings.chunk_size = 512
async_anthropic_client = AsyncAnthropic()
client = MongoClient(os.getenv("MONGODB_URI"))
db = client.get_database()
collection = db.get_collection("travelrecords")


async def get_image_b64_data(image_url: str):
    def blocking_io_and_compress():
        try:
            response = requests.get(image_url, timeout=10)
            response.raise_for_status()
            content = response.content

            if len(content) <= 5 * 1024 * 1024:
                return response.headers.get('Content-Type','image/jpeg'), base64.b64encode(content).decode('utf-8')

            print(f"⚠️ Image >5MB, compressing: {image_url}")
            img = Image.open(BytesIO(content))
            buffer = BytesIO()
            img.save(buffer, format="JPEG", quality=70, optimize=True)
            compressed_data = buffer.getvalue()

            if len(compressed_data) > 5 * 1024 * 1024:
                print(f"❌ Compression failed, still too large.")
                return None, None
            return "image/jpeg", base64.b64encode(compressed_data).decode('utf-8')
        except Exception as e:
            print(f"❌ Image processing error: {e}")
            return None, None

    return await asyncio.to_thread(blocking_io_and_compress)

async def describe_image(image_data: str, media_type: str) -> str:
    if not image_data: return ""
    try:
        response = await async_anthropic_client.messages.create(
            model="claude-3-haiku-20240307", max_tokens=75,
            messages=[{"role":"user", "content":[
                {"type":"image", "source":{"type":"base64", "media_type":media_type, "data":image_data}},
                {"type":"text", "text":"Briefly describe this travel photo's key elements and atmosphere in one sentence."}
            ]}]
        )
        return response.content[0].text
    except Exception as e:
        print(f"ERROR calling vision model: {e}")
        return ""

async def describe_all_images(image_urls: list) -> str:
    if not image_urls: return "No images provided."
    tasks = [get_image_b64_data(url) for url in image_urls]
    results = await asyncio.gather(*tasks)

    desc_tasks = [describe_image(img_data, media_type) for media_type, img_data in results if img_data]
    descriptions = await asyncio.gather(*desc_tasks)
    return "\n".join(descriptions)

async def create_personalized_plan(user_name: str, new_destination: str, trip_duration_days: int, user_request: str) -> str:
    print(f"--- [Corrected Async] Starting Personalized Planner for {user_name} to {new_destination} ---")
    try:
        user_records = await asyncio.to_thread(list, collection.find({"name": {"$regex": user_name, "$options": "i"}}))
        if not user_records: return f"I couldn't find any past travel records for {user_name}."
        print(f"Found {len(user_records)} past trips for {user_name}.")

        async def create_doc(record):
            image_descriptions = await describe_all_images(record.get('uploadedImages', []))
            text_content = (f"Trip to {record.get('destinationName', 'N/A')}: Highlights: {record.get('highlights', 'N/A')}\nImage Summary: {image_descriptions}")
            return Document(text=text_content)

        documents = await asyncio.gather(*[create_doc(r) for r in user_records])
        print(f"Successfully created {len(documents)} documents for RAG.")

        def build_and_retrieve(docs):
            print("Building RAG index... You should see a progress bar now.")
            index = VectorStoreIndex.from_documents(docs, show_progress=True)
            return index.as_retriever(similarity_top_k=3).retrieve(f"Preferences for {new_destination}: {user_request}")

        retrieved_nodes = await asyncio.to_thread(build_and_retrieve, documents)
        retrieved_context = "\n\n---\n\n".join([node.get_content() for node in retrieved_nodes])
        print(f"\n--- Retrieved Context for Persona ---\n{retrieved_context}\n-----------------------------------\n")

        system_prompt = "You are an expert travel agent and persona analyst. Your core function is to synthesize a user's past travel preferences with their current request to generate a truly personalized and actionable travel itinerary."

        final_prompt = f"""
**Mission:** Generate a hyper-personalized travel plan.

**1. Input Data:**

* **User Name:** {user_name}
* **Destination:** {new_destination}
* **Trip Duration:** {trip_duration_days} days
* **Specific Request:** "{user_request}"
* **User's Historical Travel Context (for Persona Analysis):**
---
{retrieved_context}
---

**2. Your Task (A mandatory two-step process):**

* **Step A: Define the User's Travel Persona.**
Based *only* on their historical preferences provided above, build a detailed understanding of this user's core travel style, values, and preferences.

* **Step B: Craft the Custom Itinerary.**
Using your deep understanding of the user's persona from Step A, create a day-by-day travel plan for their trip to {new_destination}. Every recommendation must align with their inferred preferences.

**3. Required Output Format (Crucial for user connection):**

1. **Greeting and Persona Summary:**
Start with a detailed summary of the user's travel persona, beginning with the phrase "Based on your past travel experiences, I've discovered you are a traveler who...". This summary should be rich with insights. For example: "Based on your past travel experiences, I've discovered you are a traveler who seeks out spectacular, awe-inspiring moments and deep cultural immersion. You appreciate both iconic, grand-scale views (like the fireworks in Tokyo and the Valley of the Kings in Luxor) and have a keen sense for authentic cuisine, while actively avoiding overrated experiences (like the cocktails in Helsinki). You balance thrilling adventures (hot air ballooning) with quiet cultural exploration and maintain a savvy, cautious approach to new environments."

2. **Introduction to the Plan:**
After the persona summary, add a transitional sentence like: "With this understanding of your unique style, I've crafted this tailored itinerary for your Paris adventure:"

3. **Personalized Itinerary:**
Finally, present the day-by-day itinerary in a clear, easy-to-read format.
"""

        print("--- Calling Final LLM with direct RAG context... ---")
        response_message = await async_anthropic_client.messages.create(
            model="claude-3-5-sonnet-20240620",
            max_tokens=4096, system=system_prompt,
            messages=[{"role":"user", "content":final_prompt}]
        )
        return response_message.content[0].text

    except Exception as e:
        error_message = f"FATAL TOOL ERROR: {type(e).__name__}: {str(e)}"
        print("\n\n---" + error_message + "---\n\n")
        return error_message
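Illustrative usage sketch (hypothetical): assuming the same environment variables as above, the baseline planner could be called like this; the name, destination, and request are placeholders.

# Hypothetical invocation of the baseline personalized planner.
import asyncio
from tools.personalized_planner import create_personalized_plan

plan = asyncio.run(
    create_personalized_plan(
        user_name="Tracy Shen",
        new_destination="Paris",
        trip_duration_days=4,
        user_request="Museums, food markets, and one day trip outside the city",
    )
)
print(plan)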
tools/travel_data_analyzer.py
ADDED
@@ -0,0 +1,187 @@
import os
import asyncio
from llama_index.core import (
    Document,
    VectorStoreIndex,
    Settings,
    StorageContext,
    load_index_from_storage
)
from llama_index.core.retrievers import VectorIndexRetriever
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from pymongo import MongoClient
from anthropic import AsyncAnthropic
import traceback


try:
    Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
    Settings.chunk_size = 512
except Exception as e:
    print(f"Error initializing LlamaIndex settings in analyzer tool: {e}")

async_anthropic_client = AsyncAnthropic()

MONGO_URI = os.getenv("MONGODB_URI")
INDEX_PERSIST_DIR = "./travel_data_index"

mongo_client_analyzer = None
db_analyzer = None
travel_records_collection_analyzer = None
llama_index_instance_analyzer = None

async def initialize_analyzer_tool():
    global mongo_client_analyzer, db_analyzer, travel_records_collection_analyzer, llama_index_instance_analyzer

    if llama_index_instance_analyzer is not None and mongo_client_analyzer is not None:  # Already initialized
        print("Analyzer tool already initialized.")
        return True

    if not MONGO_URI:
        print("FATAL: MONGODB_URI environment variable not set for Analyzer tool.")
        return False

    if mongo_client_analyzer is None:
        try:
            mongo_client_analyzer = MongoClient(MONGO_URI, serverSelectionTimeoutMS=5000)  # Add timeout
            mongo_client_analyzer.admin.command('ping')
            db_analyzer = mongo_client_analyzer.get_database()
            travel_records_collection_analyzer = db_analyzer.get_collection("travelrecords")
            print("MongoDB connection successful for Analyzer Tool.")
            if travel_records_collection_analyzer.count_documents({}) == 0:  # Check if collection is empty
                print("Warning: 'travelrecords' collection is empty in MongoDB for Analyzer Tool.")
        except Exception as e:
            print(f"FATAL: Could not connect to MongoDB or get collection for Analyzer Tool. Error: {e}")
            mongo_client_analyzer = None; db_analyzer = None; travel_records_collection_analyzer = None
            return False

    try:
        if os.path.exists(INDEX_PERSIST_DIR) and os.listdir(INDEX_PERSIST_DIR):
            print(f"Loading LlamaIndex from {INDEX_PERSIST_DIR} for Analyzer Tool...")
            storage_context = StorageContext.from_defaults(persist_dir=INDEX_PERSIST_DIR)
            llama_index_instance_analyzer = load_index_from_storage(storage_context)
            print("LlamaIndex loaded successfully for Analyzer Tool.")
        else:
            print(f"LlamaIndex not found or directory empty at {INDEX_PERSIST_DIR}. Attempting to build for Analyzer Tool...")
            if travel_records_collection_analyzer is None:
                print("Cannot build LlamaIndex: MongoDB collection 'travelrecords' is not available.")
                return False

            all_records_cursor = travel_records_collection_analyzer.find({})
            all_records = await asyncio.to_thread(list, all_records_cursor)

            if not all_records:
                print("No records found in 'travelrecords' collection to build the index. Creating empty index for Analyzer Tool.")
                llama_index_instance_analyzer = VectorStoreIndex.from_documents([], show_progress=False)
                return True

            documents = []
            for record in all_records:
                content_parts = [
                    f"Traveler Name: {record.get('name', 'N/A')}",
                    f"Destination: {record.get('destinationName', 'N/A')}",
                    f"Country: {record.get('destinationCountry', 'N/A')}",
                    f"Start Date: {record.get('startDate', 'N/A')}",
                    f"End Date: {record.get('endDate', 'N/A')}",
                    f"Duration (days): {record.get('tripDurationDays', 'N/A')}",
                    f"Highlights: {record.get('highlights', 'N/A')}",
                    f"Latitude: {record.get('latitude', 'N/A')}",
                    f"Longitude: {record.get('longitude', 'N/A')}"
                ]
                document_text = "\n".join(filter(None, content_parts))
                metadata = {
                    "traveler_name": record.get('name'),
                    "destination_city": record.get('destinationName'),
                    "destination_country": record.get('destinationCountry'),
                    "start_date": str(record.get('startDate')),
                    "latitude": record.get('latitude'),
                    "longitude": record.get('longitude')
                }
                filtered_metadata = {k: v for k, v in metadata.items() if v is not None}
                documents.append(Document(text=document_text, metadata=filtered_metadata))

            if not documents:
                print("No documents were created from MongoDB records for Analyzer Tool. Creating empty index.")
                llama_index_instance_analyzer = VectorStoreIndex.from_documents([], show_progress=False)
                return True

            print(f"Building LlamaIndex from {len(documents)} travel records for Analyzer Tool...")
            llama_index_instance_analyzer = VectorStoreIndex.from_documents(documents, show_progress=True)
            llama_index_instance_analyzer.storage_context.persist(persist_dir=INDEX_PERSIST_DIR)
            print(f"LlamaIndex built and persisted to {INDEX_PERSIST_DIR} for Analyzer Tool.")
        return True
    except Exception as e:
        print(f"Error loading or building LlamaIndex for Analyzer Tool: {e}\n{traceback.format_exc()}")
        llama_index_instance_analyzer = None
        return False

async def analyze_travel_data_with_llm(user_question: str) -> str:
    """
    Analyzes travel records using LlamaIndex and an LLM to answer statistical or comparative questions.
    The LLM will infer necessary context (like departure points for 'farthest' queries) from the user_question itself.
    """
    global llama_index_instance_analyzer

    if llama_index_instance_analyzer is None:
        print("LlamaIndex instance not available for analyzer tool. Attempting re-initialization...")
        init_success = await initialize_analyzer_tool()
        if not init_success or llama_index_instance_analyzer is None:
            return "Sorry, the travel data analysis tool is currently unavailable or could not be initialized."

    print(f"--- Analyzer Tool: Processing question: '{user_question}' ---")

    try:
        retriever = VectorIndexRetriever(
            index=llama_index_instance_analyzer,
            similarity_top_k=30,
        )
        retrieved_nodes = await asyncio.to_thread(retriever.retrieve, user_question)

        if not retrieved_nodes:
            print(f"Initial retrieval for '{user_question}' yielded no results. Trying a broader retrieval...")
            retrieved_nodes = await asyncio.to_thread(retriever.retrieve, "summary of all travel records and traveler activities")
            if not retrieved_nodes:
                return "I couldn't find relevant travel records in the database to answer your question, even with a broader search."

        context_str = "\n\n---\n".join([node.get_text() for node in retrieved_nodes])  # Use node.get_text()

        analysis_prompt = f"""
You are a data analyst AI. Your task is to answer the user's question based on the provided collection of travel records.

User's Question: "{user_question}"

Available Travel Records Context (a subset of all records, retrieved based on similarity to the question):
---
{context_str}
---

Your Task:
Carefully analyze ALL the provided travel records context to answer the user's question.
Consider the following when formulating your answer:
- "Most active traveler": Interpret this based on the context. It could mean the traveler with the highest number of distinct trip records mentioned, or who has visited the most unique destinations mentioned. If possible, count distinct trips per traveler name. State the basis of your conclusion if ambiguous.
- "Farthest place": If the user's question specifies a departure point (e.g., "From Helsinki, who went farthest?"), use that as your origin. If the question does not specify a departure point, you must state that "farthest" is relative and an exact answer cannot be given without an origin, but you can list some very distant-sounding locations mentioned in the records. Use your general geographical knowledge to estimate relative distances. The context includes latitude and longitude for destinations; you can mention these help in determining distance but you are not expected to perform calculations.
- "Most times to [country/city]": Count how many separate trip records exist for each traveler to the specified location *within the provided context*.

Provide a clear, concise, and direct answer.
If the provided context is insufficient to definitively answer (e.g., not enough records retrieved), state that clearly (e.g., "Based on the currently available information, I cannot definitively determine... but I can tell you...").
If listing multiple travelers for a ranking, please rank them.
Base your answer ONLY on the provided travel records context and the user's question. Do not invent information or assume data beyond what's provided in the context.
"""

        print(f"--- Sending analytical prompt to LLM (context length: {len(context_str)} chars) ---")
        response = await async_anthropic_client.messages.create(
            model="claude-3-5-sonnet-20240620",
            max_tokens=2000,
            temperature=0.1,
            system="You are an AI data analyst. Your responses must be strictly based on the provided context from travel records and the user's question. Be precise and indicate if the context is insufficient for a definitive answer.",
            messages=[{"role": "user", "content": analysis_prompt}]
        )

        answer = response.content[0].text.strip() if response.content and response.content[0].text else "I analyzed the data but could not formulate a textual answer from the LLM."
        print(f"--- LLM Analysis Result: {answer[:300]}... ---")
        return answer

    except Exception as e:
        error_message = f"Error during travel data analysis with LLM: {type(e).__name__}: {str(e)}"
        print(f"{error_message}\n{traceback.format_exc()}")
        return f"Sorry, I encountered an error trying to analyze the travel data: {str(e)}"
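Illustrative usage sketch (hypothetical): initialize_analyzer_tool() loads or builds the persisted index under ./travel_data_index before questions are answered; the question text is a placeholder.

# Hypothetical call to the analyzer tool.
import asyncio
from tools.travel_data_analyzer import initialize_analyzer_tool, analyze_travel_data_with_llm

async def main():
    if await initialize_analyzer_tool():
        answer = await analyze_travel_data_with_llm("Who is the most active traveler?")
        print(answer)

asyncio.run(main())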
tools/user_trip_summary.py
ADDED
@@ -0,0 +1,36 @@
import os
import json
from pymongo import MongoClient
from dotenv import load_dotenv
from trip_utils import format_trip_summary

load_dotenv()

client = MongoClient(os.getenv("MONGODB_URI"))
db = client.get_database()
collection = db.get_collection("travelrecords")


def summarize_trip(person_name: str, city: str) -> str:
    """Summarize a user's trip to a given city using MongoDB data."""
    records = list(collection.find({
        "name": {"$regex": person_name, "$options": "i"},
        "$or": [
            {"destinationName": city},
            {"locationName": city}
        ]
    }))

    if not records:
        return json.dumps({
            "type": "text",
            "message": f"No travel records found for {person_name} in {city}."
        })

    full_summaries = [format_trip_summary(rec) for rec in records]
    final_summary = "\n---\n".join(full_summaries)

    return json.dumps({
        "type": "text",
        "message": final_summary
    })
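Illustrative usage sketch (hypothetical): the tool returns a JSON string whose "message" field holds the formatted trip summaries; the name and city are placeholders.

# Hypothetical call to the per-user trip summary tool.
import json
from tools.user_trip_summary import summarize_trip

result = json.loads(summarize_trip("Tracy Shen", "Tokyo"))
print(result["message"])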
tools/web_search.py
ADDED
@@ -0,0 +1,100 @@
# filename: web_search.py (Hybrid Intelligence Version)

import os
import asyncio
from tavily import TavilyClient
from anthropic import AsyncAnthropic
from dotenv import load_dotenv

load_dotenv()

tavily_client = TavilyClient(api_key=os.getenv("TAVILY_API_KEY"))
async_anthropic_client = AsyncAnthropic()

async def get_travel_intelligence_briefing(destination: str, time_period: str, user_keywords: list[str] = None) -> str:
    """
    Takes a destination, a time period, and user-specific keywords, runs a web search that mixes
    "baseline intelligence" and "keyword intelligence" queries, and consolidates the results into a briefing.
    """
    print(f"\n--- [Hybrid Intelligence] Briefing for: {destination} ({time_period}) with keywords: {user_keywords} ---")

    queries = [
        f"major holidays or festivals in {destination} during {time_period}",
        f"weather advisories or natural disaster risk for {destination} {time_period}",
        f"general transport strikes or protests planned for {destination} {time_period}",
        f"official travel warnings or geopolitical issues for {destination}",
    ]

    if user_keywords:
        for keyword in user_keywords:
            queries.append(f"latest news, closures, or strikes related to {keyword} {destination}")

    print(f"--- Step 1: Generated {len(queries)} hybrid queries. ---")

    async def _run_single_search(query):
        try:
            return await asyncio.to_thread(
                tavily_client.search,
                query=query, search_depth="basic", max_results=3
            )
        except Exception as e:
            print(f" - Search failed for query '{query}': {e}")
            return None

    print("--- Step 2: Executing all searches in parallel...")
    search_tasks = [_run_single_search(q) for q in queries]
    all_results = await asyncio.gather(*search_tasks)

    raw_context = ""
    for query, result_pack in zip(queries, all_results):
        if result_pack and result_pack.get('results'):
            raw_context += f"Intelligence regarding '{query}':\n"
            for res in result_pack['results']:
                raw_context += f"- {res['content']}\n"
            raw_context += "\n---\n"

    if not raw_context:
        return "Could not retrieve any relevant travel intelligence from the web."

    print("--- Step 3: Synthesizing all intelligence into a final briefing...")
    system_prompt = "You are a professional travel intelligence analyst. Your job is to synthesize raw data into a clear, concise, and actionable briefing for a trip planner. Structure your output with clear headings."

    synthesis_prompt = f"""
Please analyze the following raw web search results for a trip to **{destination}** during **{time_period}**.
Consolidate all information into a structured 'Travel Intelligence Briefing'.
For each category (General Safety, Weather, Events, Transportation, and **User-Specific Keywords**), provide a concise summary.
If no information is found for a category, state "No significant information found."

**Raw Intelligence Data:**
---
{raw_context}
---

**Required Output Format:**

**Travel Intelligence Briefing: {destination} ({time_period})**

**1. General Safety & Advisories:**
- [Summary of wars, warnings, policy changes]

**2. Weather & Environment:**
- [Summary of forecasts, climate patterns, disaster risks]

**3. Cultural Events & Holidays:**
- [Summary of festivals, holidays, major events]

**4. General Transportation:**
- [Summary of general strikes or disruptions]

**5. User-Specific Items:**
- [For each keyword from the user_keywords list, provide a specific summary. e.g., "Regarding Finnair: ...", "Regarding Louvre Museum: ..."]
"""

    try:
        response = await async_anthropic_client.messages.create(
            model="claude-3-haiku-20240307", max_tokens=1000,
            system=system_prompt, messages=[{"role": "user", "content": synthesis_prompt}]
        )
        return response.content[0].text
    except Exception as e:
        return f"Error during final synthesis: {e}"
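Illustrative usage sketch (hypothetical): assumes TAVILY_API_KEY and ANTHROPIC_API_KEY are set; the destination, time period, and keywords are placeholders drawn from the kinds of values used elsewhere in this repo.

# Hypothetical call to the hybrid web-intelligence tool.
import asyncio
from tools.web_search import get_travel_intelligence_briefing

briefing = asyncio.run(
    get_travel_intelligence_briefing(
        destination="Helsinki",
        time_period="early June 2025",
        user_keywords=["Finnair", "Helsinki art museum"],
    )
)
print(briefing)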
travel_data_index/default__vector_store.json
ADDED
The diff for this file is too large to render.
See raw diff
travel_data_index/docstore.json
ADDED
@@ -0,0 +1 @@
{"docstore/metadata": {"bb101f42-b81c-4488-999c-d3cb26224653": {"doc_hash": "c0278d8a1bb32be4882dcdf12dc6f4192affcf0f76fe3751c5eb044d0cb987bf"}, "62ae5226-b04e-4c00-96e6-5f5ad48f69f3": {"doc_hash": "04f1987e71e16b9c4fb2d260fc8eb5b5e5f453bb84e76525cde2fdf549ba733d"}, "f5c370e9-06d5-43b0-8f91-fdcfb9e39445": {"doc_hash": "13d5a322d7904643e1bc4249c01e2c4b79bab3ce77f26ffb697913af622e3489"}, "9fcfe6c3-de7d-425c-9466-b3d38ed478d2": {"doc_hash": "140ea2ebc97026668db13701b4d598312ce3df3c0a23ab3648c14349d41723c4"}, "b496d504-fc1b-45ee-aa60-16fc8a4b89f5": {"doc_hash": "41af37bf50a80fafbc7b8677d04833477782dc0321f51a9ac999d07f70fe7870"}, "2f305f8d-83c3-4edf-a6bd-0a23eaacdc8c": {"doc_hash": "81363f1506d51347d637bd8031c3cb27044ed9d287e431aa1bf566d50d92594d"}, "247908d5-033f-4b93-a7b2-734d4b1ccaf9": {"doc_hash": "002b580e064b4240af8fa5344250ce5997c0a9b56f5bf831cb6827e7ff2a32ed"}, "d54f1f7a-8f8b-4f1d-ace4-75425368fb78": {"doc_hash": "a73b2f6c36958b454c97436c8b9317af334fa5818beea6e0c8692ce8fbf88501"}, "d19b766a-3dc3-476b-966d-7653662afe4b": {"doc_hash": "c54bb0e944567ead67d37571ec8a10221f29a861403bb61bd151cc6caecb1e1f"}, "3b63eab1-6f0f-4c85-ba41-885cb6c5d253": {"doc_hash": "e76c2866c3e37ce03d92bc6b5fee6b1a8f26c7de346083f4a83336a75ea59cde"}, "bd86c502-cf6d-4f8c-9402-2e03abdd8d3f": {"doc_hash": "b715a77bd447e8a8cad72e73fdab56d6946af44e86eb90afc055926a8464629e"}, "d7589893-5529-493c-ab6b-5459fc44a698": {"doc_hash": "879f469149b8407ff6f55f9e217e1d91683a2ccf6700ba01df7554988176067e"}, "c61645a8-73f1-42b2-b2b1-bca2f82e1016": {"doc_hash": "cbad53c24516c53d878cd5f882102be51632b299cb409632d77da75c9189b6f9"}, "fed218ef-7453-4323-a215-259fc59a0f6f": {"doc_hash": "d0dc7563fe83dc57d77b3091f93e2d49d4c000059fa16d47061b1571da20e00d"}, "d1109c92-031e-47e2-9c1d-89f1d6717a60": {"doc_hash": "63a0b1d0d2740a8731b2d02605bcec6a176115770022cf5498ecd990de81c8b0"}, "0f962f60-69f7-47e9-8a61-a032320629b2": {"doc_hash": "bb7f3e7fa9a60377d98524d68c3491cf73114bcff69e9895b6ddeb4602eee07f"}, "97c79db6-e797-4741-aba7-d795ab508a97": {"doc_hash": "f1a3b44f0946e4a90d63e02d7e030a9c51c2abc445ad282ec37484c5a46368db"}, "8d53562c-f999-45a2-ae5f-9a7b295a80b6": {"doc_hash": "0b0adfd6c5005e18f989f8eddbaf1fd5ceeff71dd785da8e0d71905f50564c94"}, "a47213d0-0611-4d26-912b-212b46e3f4e2": {"doc_hash": "055cea1bb5dd1aabe62feeeee45abd5299080c31af8bac7d6a4ad33276184047"}, "c3ca79b8-3aea-4364-8361-9b0b51951522": {"doc_hash": "0dfdf1a0aae084b19c19d83a65f754a043f67c0baa2cdfadecdf62f352dc2718"}, "ab8efc2a-e97b-4198-8abb-5f0ee915f412": {"doc_hash": "901e664197f3b54cd1dbb0f4f1ce909754de98e61dbe987a1b8c905f574225e8"}, "90395f33-a9d7-4875-906d-b6f7a072ef28": {"doc_hash": "16be2cc4792bd3e71046fbb390b9014082c404206f889a2a2d4d8e3fb1e0c45a"}, "af2b1766-a414-4f0f-a335-c25a10315f59": {"doc_hash": "c16fe3eb809d42fd90583d39239df3fcfdd7116bbc3422f2853af95cf2cd822a"}, "b199dc03-c2bd-4575-86a1-b11decafa20e": {"doc_hash": "dc9ea0718152e1f6cbe46094952555046aa7ddb816f2fc934ae9a9eb8244f18b"}, "26b4a98b-0615-4649-a977-c5c4b9b82f40": {"doc_hash": "8691aba649be531580f27c86f69fdb0339a423d1059d236cb403f9ed289bab8c"}, "118833ff-b0d8-4628-a3a5-efaf41c3e90c": {"doc_hash": "68ab119b8243c30cc0e853f9629cbc13eb6b1eb1bcd974516936ce86768ea2df", "ref_doc_id": "bb101f42-b81c-4488-999c-d3cb26224653"}, "5ad3594b-22c9-474f-83ad-6d849cf2401b": {"doc_hash": "f0c0d1d0be01a53f96a725ae4ec4fa51957f78b97216853a587c8855125dc8da", "ref_doc_id": "62ae5226-b04e-4c00-96e6-5f5ad48f69f3"}, "e1831c93-7292-485a-a70e-549f321b3b19": {"doc_hash": "12954fc0f1fd8d92dd9419e6c959729078383d91d61983f6df5eae4d8a5b81ac", 
"ref_doc_id": "f5c370e9-06d5-43b0-8f91-fdcfb9e39445"}, "f2106256-8633-4e75-920e-e547fa634d6f": {"doc_hash": "39b542a1f6f36b1cc075e453c26b9c0e081636e0791dda2ca3b38814ec6c8c06", "ref_doc_id": "9fcfe6c3-de7d-425c-9466-b3d38ed478d2"}, "5b2bfeda-fe49-47ea-8985-81284fde8f4e": {"doc_hash": "03cfaf889ec83d5aefb6318b8fb77a711e7c777d00ecd7038a72ca660ee92f27", "ref_doc_id": "b496d504-fc1b-45ee-aa60-16fc8a4b89f5"}, "d9e000b5-9ade-48a6-aece-3f8e4f02184f": {"doc_hash": "98e50f6be6f9819c21c4c3a3f3bf70be40e806306b7742475fcc88605a98387a", "ref_doc_id": "2f305f8d-83c3-4edf-a6bd-0a23eaacdc8c"}, "2383f256-33e0-46c7-a5c6-3dee50db45ce": {"doc_hash": "9afa7bf8afb56979879ae7073d25e2ffe8e81f5db2990221fed80e020e1f24f1", "ref_doc_id": "247908d5-033f-4b93-a7b2-734d4b1ccaf9"}, "ff01ff0a-2f6d-4df0-b099-f94ebe16817d": {"doc_hash": "e7d6b7356592c675768e37d95da27b590945d6cc582b90af7a53ea7ca7c8c956", "ref_doc_id": "d54f1f7a-8f8b-4f1d-ace4-75425368fb78"}, "a60bb298-8ce7-4aba-9eb9-545245567801": {"doc_hash": "693dc640598a0cd2826ac6f08ba8e8ec5ba6da90c2e1cd75b634094b476fe0a9", "ref_doc_id": "d19b766a-3dc3-476b-966d-7653662afe4b"}, "5f038e04-d26d-4f9e-b9c3-8c0d993d1769": {"doc_hash": "6fe0b9fec62c142d702be46eba260a907fdd733b74a89f4074321278314e36f4", "ref_doc_id": "3b63eab1-6f0f-4c85-ba41-885cb6c5d253"}, "65d5e162-c9d2-48af-9c3a-bed89ce949e9": {"doc_hash": "5f2efb2e86d1c35899b54129128c9fc102fe54207d8d4b315b2a2d3c5c07b5e9", "ref_doc_id": "bd86c502-cf6d-4f8c-9402-2e03abdd8d3f"}, "8af0a430-2a75-40ba-ae23-abe8d3535850": {"doc_hash": "5eca7f1e3ed2aca45d81c95fbc70f323cbd281f30c5f8e3f703c04c7aa677f60", "ref_doc_id": "d7589893-5529-493c-ab6b-5459fc44a698"}, "ffe06152-4993-44ee-8420-29ef8f12610e": {"doc_hash": "55da25b3c8fa938f816133e4b50d3a7636112446e25c65aa73cbcccf8a91ceaf", "ref_doc_id": "c61645a8-73f1-42b2-b2b1-bca2f82e1016"}, "3db9558a-ffb7-45d5-9310-524b4b12952e": {"doc_hash": "0cda97c1e6531269e17f86d3dacaafa89cf81ffef07193179a71f941b801d945", "ref_doc_id": "fed218ef-7453-4323-a215-259fc59a0f6f"}, "b4eacbd5-9d81-49c3-bf20-dbf0dc8f0a93": {"doc_hash": "a617115b8135dfa24bd8dad368d6fd1e3591f9a5f28ea76bcac4446bb2bbefd3", "ref_doc_id": "d1109c92-031e-47e2-9c1d-89f1d6717a60"}, "55ef06b3-8957-4ffa-b9e6-4cae377ef834": {"doc_hash": "a7bd1142687cf40472d16d2c95cbeb55d2b44cc759da8feb89507346f43a293c", "ref_doc_id": "0f962f60-69f7-47e9-8a61-a032320629b2"}, "71ddf846-cbda-4d3d-b5a6-47d398474e83": {"doc_hash": "0bc8ddb01176c34ae027f6759dc3ff9510958a0c84153f84e8fa03b3b4d93760", "ref_doc_id": "97c79db6-e797-4741-aba7-d795ab508a97"}, "0c57b44b-7d53-49ab-8068-b7268698958d": {"doc_hash": "a51439c991fb4bb0e86806a5b5d242e1c7e5e810758fa32479df9db9283eb6cb", "ref_doc_id": "8d53562c-f999-45a2-ae5f-9a7b295a80b6"}, "1837793e-04e2-41b3-8379-df8d259b80a7": {"doc_hash": "2e21ee4ae4826f33e1a67b8effe73c3debf1472311ff79145de154c7c24ffe59", "ref_doc_id": "a47213d0-0611-4d26-912b-212b46e3f4e2"}, "d095ccee-dfd0-465e-a410-1630274d0bce": {"doc_hash": "553b822a2191559f0ec54e90e5f7adc44b6dacdfffa292049cf554eb09cebcf8", "ref_doc_id": "c3ca79b8-3aea-4364-8361-9b0b51951522"}, "ed1e1cca-2d3f-4206-b115-ca5bd5f692af": {"doc_hash": "961e54a5cdc3d36d770488478199edb0517bfcfeda9f6b011145eb2894b2c486", "ref_doc_id": "ab8efc2a-e97b-4198-8abb-5f0ee915f412"}, "40bff8fd-e5ea-434b-a2f5-546c77709e0a": {"doc_hash": "fe2b574d64ee3d6cae8894767359118e63255be90212a78e83d67b112468988b", "ref_doc_id": "90395f33-a9d7-4875-906d-b6f7a072ef28"}, "5f21fbc5-8459-4477-b89b-59b791f3d064": {"doc_hash": "1df750782f881f016302f1d3ce85515ea8e9968cc8a4846eeb4e9fc3f9b92925", "ref_doc_id": 
"af2b1766-a414-4f0f-a335-c25a10315f59"}, "57416b99-df61-4e5e-a8ac-5c3fb99aec5d": {"doc_hash": "a88a1b07a40dbe62759476b98698d0a82af061aa3979d1644a53464d72f4c632", "ref_doc_id": "b199dc03-c2bd-4575-86a1-b11decafa20e"}, "af223824-d6e3-46a6-982a-01fa2a04b06f": {"doc_hash": "b57c09b405d1339eb92ce35f679d5e2e033d0b6ae3416e52751c6d3cca350332", "ref_doc_id": "26b4a98b-0615-4649-a977-c5c4b9b82f40"}}, "docstore/data": {"118833ff-b0d8-4628-a3a5-efaf41c3e90c": {"__data__": {"id_": "118833ff-b0d8-4628-a3a5-efaf41c3e90c", "embedding": null, "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Helsinki", "start_date": "2025-05-28 00:00:00", "latitude": 60.16985569999999, "longitude": 24.938379}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "bb101f42-b81c-4488-999c-d3cb26224653", "node_type": "4", "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Helsinki", "start_date": "2025-05-28 00:00:00", "latitude": 60.16985569999999, "longitude": 24.938379}, "hash": "c0278d8a1bb32be4882dcdf12dc6f4192affcf0f76fe3751c5eb044d0cb987bf", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Tracy Shen\nDestination: Helsinki\nCountry: N/A\nStart Date: 2025-05-28 00:00:00\nEnd Date: 2025-05-31 00:00:00\nDuration (days): N/A\nHighlights: Avoid the cocktails at Holidays restaurant by the seaside - they were absolutely terrible, possibly the worst I've ever had. Left mine almost completely untouched on the table. \nLatitude: 60.16985569999999\nLongitude: 24.938379", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 382, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "5ad3594b-22c9-474f-83ad-6d849cf2401b": {"__data__": {"id_": "5ad3594b-22c9-474f-83ad-6d849cf2401b", "embedding": null, "metadata": {"traveler_name": "Hope", "destination_city": "Armsterdam", "start_date": "2025-02-01 00:00:00", "latitude": 52.3675734, "longitude": 4.9041389}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "62ae5226-b04e-4c00-96e6-5f5ad48f69f3", "node_type": "4", "metadata": {"traveler_name": "Hope", "destination_city": "Armsterdam", "start_date": "2025-02-01 00:00:00", "latitude": 52.3675734, "longitude": 4.9041389}, "hash": "04f1987e71e16b9c4fb2d260fc8eb5b5e5f453bb84e76525cde2fdf549ba733d", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Hope\nDestination: Armsterdam\nCountry: N/A\nStart Date: 2025-02-01 00:00:00\nEnd Date: 2025-02-05 00:00:00\nDuration (days): N/A\nHighlights: The houses are tall and look like from a fairy tale. 
But the streets can be crowded, so stay close to your family.\nLatitude: 52.3675734\nLongitude: 4.9041389", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 308, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "e1831c93-7292-485a-a70e-549f321b3b19": {"__data__": {"id_": "e1831c93-7292-485a-a70e-549f321b3b19", "embedding": null, "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Chongqing", "start_date": "2024-09-01 00:00:00", "latitude": 29.5656843, "longitude": 106.5511838}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "f5c370e9-06d5-43b0-8f91-fdcfb9e39445", "node_type": "4", "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Chongqing", "start_date": "2024-09-01 00:00:00", "latitude": 29.5656843, "longitude": 106.5511838}, "hash": "13d5a322d7904643e1bc4249c01e2c4b79bab3ce77f26ffb697913af622e3489", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Yushi Tan\nDestination: Chongqing\nCountry: N/A\nStart Date: 2024-09-01 00:00:00\nEnd Date: 2024-09-05 00:00:00\nDuration (days): N/A\nHighlights: The city has tall buildings and mountains \u2014 it looks like a movie! But there are many stairs and hills, so wear good shoes.\nLatitude: 29.5656843\nLongitude: 106.5511838", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 323, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "f2106256-8633-4e75-920e-e547fa634d6f": {"__data__": {"id_": "f2106256-8633-4e75-920e-e547fa634d6f", "embedding": null, "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Luxor", "start_date": "2025-03-15 00:00:00", "latitude": 25.6872431, "longitude": 32.6396357}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "9fcfe6c3-de7d-425c-9466-b3d38ed478d2", "node_type": "4", "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Luxor", "start_date": "2025-03-15 00:00:00", "latitude": 25.6872431, "longitude": 32.6396357}, "hash": "140ea2ebc97026668db13701b4d598312ce3df3c0a23ab3648c14349d41723c4", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Tracy Shen\nDestination: Luxor\nCountry: N/A\nStart Date: 2025-03-15 00:00:00\nEnd Date: 2025-03-17 00:00:00\nDuration (days): N/A\nHighlights: The hot air balloon ride in Luxor is absolutely worth it \u2014 you'll get a breathtaking view over the entire Valley of the Kings. However, be cautious: there are many scammers. 
Don\u2019t trust any locals asking for money without clear context or official credentials.Always negotiate prices \u2014 which is essential in Egypt.\nLatitude: 25.6872431\nLongitude: 32.6396357", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 510, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "5b2bfeda-fe49-47ea-8985-81284fde8f4e": {"__data__": {"id_": "5b2bfeda-fe49-47ea-8985-81284fde8f4e", "embedding": null, "metadata": {"traveler_name": "Hope", "destination_city": "Helsinki ", "start_date": "2025-05-30 00:00:00", "latitude": 60.16985569999999, "longitude": 24.938379}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "b496d504-fc1b-45ee-aa60-16fc8a4b89f5", "node_type": "4", "metadata": {"traveler_name": "Hope", "destination_city": "Helsinki ", "start_date": "2025-05-30 00:00:00", "latitude": 60.16985569999999, "longitude": 24.938379}, "hash": "41af37bf50a80fafbc7b8677d04833477782dc0321f51a9ac999d07f70fe7870", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Hope\nDestination: Helsinki \nCountry: N/A\nStart Date: 2025-05-30 00:00:00\nEnd Date: 2025-05-30 00:00:00\nDuration (days): N/A\nHighlights: Helsinki art museum will starts a new biennial at June 8\r\n\nLatitude: 60.16985569999999\nLongitude: 24.938379", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 258, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "d9e000b5-9ade-48a6-aece-3f8e4f02184f": {"__data__": {"id_": "d9e000b5-9ade-48a6-aece-3f8e4f02184f", "embedding": null, "metadata": {"traveler_name": "Jiao Chen", "destination_city": "Rovaniemi", "start_date": "2025-04-25 00:00:00", "latitude": 66.50394779999999, "longitude": 25.7293905}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "2f305f8d-83c3-4edf-a6bd-0a23eaacdc8c", "node_type": "4", "metadata": {"traveler_name": "Jiao Chen", "destination_city": "Rovaniemi", "start_date": "2025-04-25 00:00:00", "latitude": 66.50394779999999, "longitude": 25.7293905}, "hash": "81363f1506d51347d637bd8031c3cb27044ed9d287e431aa1bf566d50d92594d", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Jiao Chen\nDestination: Rovaniemi\nCountry: N/A\nStart Date: 2025-04-25 00:00:00\nEnd Date: 2025-04-29 00:00:00\nDuration (days): N/A\nHighlights: The best experiences were completely free\u2014nature in Finland is simply magical. The Santa Claus Village was okay for a quick visit, but it didn\u2019t compare to the beauty of the outdoors. 
Nature truly stole the show.\r\n\nLatitude: 66.50394779999999\nLongitude: 25.7293905", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 420, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "2383f256-33e0-46c7-a5c6-3dee50db45ce": {"__data__": {"id_": "2383f256-33e0-46c7-a5c6-3dee50db45ce", "embedding": null, "metadata": {"traveler_name": "Hope", "destination_city": "Mangshi", "start_date": "2024-06-01 00:00:00", "latitude": 24.4337899, "longitude": 98.58815}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "247908d5-033f-4b93-a7b2-734d4b1ccaf9", "node_type": "4", "metadata": {"traveler_name": "Hope", "destination_city": "Mangshi", "start_date": "2024-06-01 00:00:00", "latitude": 24.4337899, "longitude": 98.58815}, "hash": "002b580e064b4240af8fa5344250ce5997c0a9b56f5bf831cb6827e7ff2a32ed", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Hope\nDestination: Mangshi\nCountry: N/A\nStart Date: 2024-06-01 00:00:00\nEnd Date: 2024-06-04 00:00:00\nDuration (days): N/A\nHighlights: Mangshi is a place with a mixture of Jingpo, Dai and Han Chinese, so remember to respect other people's ethnic customs.\nLatitude: 24.4337899\nLongitude: 98.58815", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 309, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "ff01ff0a-2f6d-4df0-b099-f94ebe16817d": {"__data__": {"id_": "ff01ff0a-2f6d-4df0-b099-f94ebe16817d", "embedding": null, "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Tokyo", "start_date": "2024-06-06 00:00:00", "latitude": 35.6764225, "longitude": 139.650027}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "d54f1f7a-8f8b-4f1d-ace4-75425368fb78", "node_type": "4", "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Tokyo", "start_date": "2024-06-06 00:00:00", "latitude": 35.6764225, "longitude": 139.650027}, "hash": "a73b2f6c36958b454c97436c8b9317af334fa5818beea6e0c8692ce8fbf88501", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Tracy Shen\nDestination: Tokyo\nCountry: N/A\nStart Date: 2024-06-06 00:00:00\nEnd Date: 2024-06-13 00:00:00\nDuration (days): N/A\nHighlights: Seaside fireworks festival was spectacular - the combination of ocean breeze and colorful explosions made for an unforgettable summer evening.\nLatitude: 35.6764225\nLongitude: 139.650027", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 338, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "a60bb298-8ce7-4aba-9eb9-545245567801": {"__data__": {"id_": "a60bb298-8ce7-4aba-9eb9-545245567801", "embedding": null, "metadata": {"traveler_name": "Santeri Heino", "destination_city": "Riga", "start_date": "2024-08-12 00:00:00", "latitude": 56.9676941, "longitude": 24.1056221}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "d19b766a-3dc3-476b-966d-7653662afe4b", "node_type": "4", "metadata": {"traveler_name": "Santeri Heino", "destination_city": "Riga", "start_date": "2024-08-12 00:00:00", "latitude": 56.9676941, "longitude": 24.1056221}, "hash": 
"c54bb0e944567ead67d37571ec8a10221f29a861403bb61bd151cc6caecb1e1f", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Santeri Heino\nDestination: Riga\nCountry: N/A\nStart Date: 2024-08-12 00:00:00\nEnd Date: 2024-08-16 00:00:00\nDuration (days): N/A\nHighlights: \nLatitude: 56.9676941\nLongitude: 24.1056221", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 198, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "5f038e04-d26d-4f9e-b9c3-8c0d993d1769": {"__data__": {"id_": "5f038e04-d26d-4f9e-b9c3-8c0d993d1769", "embedding": null, "metadata": {"traveler_name": "Min Khant", "destination_city": "Bangkok,Thailand", "start_date": "2024-02-21 00:00:00", "latitude": 13.7563309, "longitude": 100.5017651}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "3b63eab1-6f0f-4c85-ba41-885cb6c5d253", "node_type": "4", "metadata": {"traveler_name": "Min Khant", "destination_city": "Bangkok,Thailand", "start_date": "2024-02-21 00:00:00", "latitude": 13.7563309, "longitude": 100.5017651}, "hash": "e76c2866c3e37ce03d92bc6b5fee6b1a8f26c7de346083f4a83336a75ea59cde", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Min Khant\nDestination: Bangkok,Thailand\nCountry: N/A\nStart Date: 2024-02-21 00:00:00\nEnd Date: 2024-05-21 00:00:00\nDuration (days): N/A\nHighlights: take a grab or local bike. its so much fun and you will love the Thailand culture.\nLatitude: 13.7563309\nLongitude: 100.5017651", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 289, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "65d5e162-c9d2-48af-9c3a-bed89ce949e9": {"__data__": {"id_": "65d5e162-c9d2-48af-9c3a-bed89ce949e9", "embedding": null, "metadata": {"traveler_name": "Matias", "destination_city": "Gran Canaria", "start_date": "2019-04-01 00:00:00", "latitude": 27.9202202, "longitude": -15.5474373}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "bd86c502-cf6d-4f8c-9402-2e03abdd8d3f", "node_type": "4", "metadata": {"traveler_name": "Matias", "destination_city": "Gran Canaria", "start_date": "2019-04-01 00:00:00", "latitude": 27.9202202, "longitude": -15.5474373}, "hash": "b715a77bd447e8a8cad72e73fdab56d6946af44e86eb90afc055926a8464629e", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Matias\nDestination: Gran Canaria\nCountry: N/A\nStart Date: 2019-04-01 00:00:00\nEnd Date: 2019-04-08 00:00:00\nDuration (days): N/A\nHighlights: Warm and quite cheap destination.\nLatitude: 27.9202202\nLongitude: -15.5474373", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 233, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "8af0a430-2a75-40ba-ae23-abe8d3535850": {"__data__": {"id_": "8af0a430-2a75-40ba-ae23-abe8d3535850", "embedding": null, "metadata": {"traveler_name": "Nikke", "destination_city": "Spain", "start_date": "2023-07-04 00:00:00", "latitude": 40.46366700000001, "longitude": -3.74922}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "d7589893-5529-493c-ab6b-5459fc44a698", "node_type": "4", 
"metadata": {"traveler_name": "Nikke", "destination_city": "Spain", "start_date": "2023-07-04 00:00:00", "latitude": 40.46366700000001, "longitude": -3.74922}, "hash": "879f469149b8407ff6f55f9e217e1d91683a2ccf6700ba01df7554988176067e", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Nikke\nDestination: Spain\nCountry: N/A\nStart Date: 2023-07-04 00:00:00\nEnd Date: 2023-07-16 00:00:00\nDuration (days): N/A\nHighlights: Be careful with the street peddlers.\nLatitude: 40.46366700000001\nLongitude: -3.74922", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 232, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "ffe06152-4993-44ee-8420-29ef8f12610e": {"__data__": {"id_": "ffe06152-4993-44ee-8420-29ef8f12610e", "embedding": null, "metadata": {"traveler_name": "Hx", "destination_city": "Amsterdam", "start_date": "2024-06-05 00:00:00", "latitude": 52.3675734, "longitude": 4.9041389}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "c61645a8-73f1-42b2-b2b1-bca2f82e1016", "node_type": "4", "metadata": {"traveler_name": "Hx", "destination_city": "Amsterdam", "start_date": "2024-06-05 00:00:00", "latitude": 52.3675734, "longitude": 4.9041389}, "hash": "cbad53c24516c53d878cd5f882102be51632b299cb409632d77da75c9189b6f9", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Hx\nDestination: Amsterdam\nCountry: N/A\nStart Date: 2024-06-05 00:00:00\nEnd Date: 2024-06-10 00:00:00\nDuration (days): N/A\nHighlights: The canal view and street views were beautiful.\nLatitude: 52.3675734\nLongitude: 4.9041389", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 238, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "3db9558a-ffb7-45d5-9310-524b4b12952e": {"__data__": {"id_": "3db9558a-ffb7-45d5-9310-524b4b12952e", "embedding": null, "metadata": {"traveler_name": "Evelyn", "destination_city": "Brussels, Belgium", "start_date": "2025-04-01 00:00:00", "latitude": 50.503887, "longitude": 4.469936}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "fed218ef-7453-4323-a215-259fc59a0f6f", "node_type": "4", "metadata": {"traveler_name": "Evelyn", "destination_city": "Brussels, Belgium", "start_date": "2025-04-01 00:00:00", "latitude": 50.503887, "longitude": 4.469936}, "hash": "d0dc7563fe83dc57d77b3091f93e2d49d4c000059fa16d47061b1571da20e00d", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Evelyn\nDestination: Brussels, Belgium\nCountry: N/A\nStart Date: 2025-04-01 00:00:00\nEnd Date: 2025-04-04 00:00:00\nDuration (days): N/A\nHighlights: Good experience.\nLatitude: 50.503887\nLongitude: 4.469936", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 217, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "b4eacbd5-9d81-49c3-bf20-dbf0dc8f0a93": {"__data__": {"id_": "b4eacbd5-9d81-49c3-bf20-dbf0dc8f0a93", "embedding": null, "metadata": {"traveler_name": "Kyrie", "destination_city": "Italy", "start_date": "2025-05-02 00:00:00", "latitude": 41.87194, "longitude": 12.56738}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], 
"relationships": {"1": {"node_id": "d1109c92-031e-47e2-9c1d-89f1d6717a60", "node_type": "4", "metadata": {"traveler_name": "Kyrie", "destination_city": "Italy", "start_date": "2025-05-02 00:00:00", "latitude": 41.87194, "longitude": 12.56738}, "hash": "63a0b1d0d2740a8731b2d02605bcec6a176115770022cf5498ecd990de81c8b0", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Kyrie\nDestination: Italy\nCountry: N/A\nStart Date: 2025-05-02 00:00:00\nEnd Date: 2025-06-13 00:00:00\nDuration (days): N/A\nHighlights: One thing I have to say which is absolutely amazing, one day I couldn\u2019t sleep in the morning when I was in Rome, so I went to Colosseum at 4 pm, I\u2019ve met three Romans and they were so enthusiastic that I couldn\u2019t forget.\nLatitude: 41.87194\nLongitude: 12.56738", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 407, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "55ef06b3-8957-4ffa-b9e6-4cae377ef834": {"__data__": {"id_": "55ef06b3-8957-4ffa-b9e6-4cae377ef834", "embedding": null, "metadata": {"traveler_name": "Hope", "destination_city": "Oulu", "start_date": "2025-05-23 00:00:00", "latitude": 65.0120888, "longitude": 25.4650773}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "0f962f60-69f7-47e9-8a61-a032320629b2", "node_type": "4", "metadata": {"traveler_name": "Hope", "destination_city": "Oulu", "start_date": "2025-05-23 00:00:00", "latitude": 65.0120888, "longitude": 25.4650773}, "hash": "bb7f3e7fa9a60377d98524d68c3491cf73114bcff69e9895b6ddeb4602eee07f", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Hope\nDestination: Oulu\nCountry: N/A\nStart Date: 2025-05-23 00:00:00\nEnd Date: 2025-05-25 00:00:00\nDuration (days): N/A\nHighlights: - Oulu Art Museum\r\nA interesting place, often renew their exhibitions, recommend!\nLatitude: 65.0120888\nLongitude: 25.4650773", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 270, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "71ddf846-cbda-4d3d-b5a6-47d398474e83": {"__data__": {"id_": "71ddf846-cbda-4d3d-b5a6-47d398474e83", "embedding": null, "metadata": {"traveler_name": "LIN", "destination_city": "Paris", "start_date": "2025-03-01 00:00:00", "latitude": 48.8575475, "longitude": 2.3513765}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "97c79db6-e797-4741-aba7-d795ab508a97", "node_type": "4", "metadata": {"traveler_name": "LIN", "destination_city": "Paris", "start_date": "2025-03-01 00:00:00", "latitude": 48.8575475, "longitude": 2.3513765}, "hash": "f1a3b44f0946e4a90d63e02d7e030a9c51c2abc445ad282ec37484c5a46368db", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: LIN\nDestination: Paris\nCountry: N/A\nStart Date: 2025-03-01 00:00:00\nEnd Date: 2025-03-09 00:00:00\nDuration (days): N/A\nHighlights: Paris is a dynamic city with multiple cultures and activities, but living costs are way too high based on the level of their salaries.\nLatitude: 48.8575475\nLongitude: 2.3513765", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 322, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", 
"class_name": "TextNode"}, "__type__": "1"}, "0c57b44b-7d53-49ab-8068-b7268698958d": {"__data__": {"id_": "0c57b44b-7d53-49ab-8068-b7268698958d", "embedding": null, "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Antwerp", "start_date": "2024-11-16 00:00:00", "latitude": 51.2199302, "longitude": 4.414990299999999}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "8d53562c-f999-45a2-ae5f-9a7b295a80b6", "node_type": "4", "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Antwerp", "start_date": "2024-11-16 00:00:00", "latitude": 51.2199302, "longitude": 4.414990299999999}, "hash": "0b0adfd6c5005e18f989f8eddbaf1fd5ceeff71dd785da8e0d71905f50564c94", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Yushi Tan\nDestination: Antwerp\nCountry: N/A\nStart Date: 2024-11-16 00:00:00\nEnd Date: 2024-11-18 00:00:00\nDuration (days): N/A\nHighlights: Antwerp\u2019s fashion district is a must-see if you\u2019re into high-end and designer stores, with MoMu (Fashion Museum) providing great insight into the city's role in global fashion. The only critique I have is that some of the museums can be a bit pricey, but the experience is worth it. Definitely take time to explore the Meir shopping street and enjoy a Belgian waffle while you\u2019re at it.\nLatitude: 51.2199302\nLongitude: 4.414990299999999", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 590, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "1837793e-04e2-41b3-8379-df8d259b80a7": {"__data__": {"id_": "1837793e-04e2-41b3-8379-df8d259b80a7", "embedding": null, "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Stockholm", "start_date": "2014-09-05 00:00:00", "latitude": 59.3327036, "longitude": 18.0656255}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "a47213d0-0611-4d26-912b-212b46e3f4e2", "node_type": "4", "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Stockholm", "start_date": "2014-09-05 00:00:00", "latitude": 59.3327036, "longitude": 18.0656255}, "hash": "055cea1bb5dd1aabe62feeeee45abd5299080c31af8bac7d6a4ad33276184047", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Yushi Tan\nDestination: Stockholm\nCountry: N/A\nStart Date: 2014-09-05 00:00:00\nEnd Date: 2024-09-06 00:00:00\nDuration (days): N/A\nHighlights: Stockholm\u2019s Archipelago is a true gem, offering scenic boat trips and quiet islands to explore. While the city is beautiful, be prepared for the high prices, especially in tourist areas. 
Overall, though, Stockholm is clean, welcoming, and packed with rich culture, making it a memorable destination.\r\n\r\n\nLatitude: 59.3327036\nLongitude: 18.0656255", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 502, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "d095ccee-dfd0-465e-a410-1630274d0bce": {"__data__": {"id_": "d095ccee-dfd0-465e-a410-1630274d0bce", "embedding": null, "metadata": {"traveler_name": "Joi", "destination_city": "New York", "start_date": "2016-12-25 00:00:00", "latitude": 40.7127753, "longitude": -74.0059728}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "c3ca79b8-3aea-4364-8361-9b0b51951522", "node_type": "4", "metadata": {"traveler_name": "Joi", "destination_city": "New York", "start_date": "2016-12-25 00:00:00", "latitude": 40.7127753, "longitude": -74.0059728}, "hash": "0dfdf1a0aae084b19c19d83a65f754a043f67c0baa2cdfadecdf62f352dc2718", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Joi\nDestination: New York\nCountry: N/A\nStart Date: 2016-12-25 00:00:00\nEnd Date: 2017-01-02 00:00:00\nDuration (days): N/A\nHighlights: Central Park, Broadway, MoMA, Intrepid Museum, American Museum of Natural History, Long Island and The High Line are worth visiting. \nLatitude: 40.7127753\nLongitude: -74.0059728", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 326, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "ed1e1cca-2d3f-4206-b115-ca5bd5f692af": {"__data__": {"id_": "ed1e1cca-2d3f-4206-b115-ca5bd5f692af", "embedding": null, "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Singapore", "start_date": "2014-09-02 00:00:00", "latitude": 1.352083, "longitude": 103.819836}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "ab8efc2a-e97b-4198-8abb-5f0ee915f412", "node_type": "4", "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Singapore", "start_date": "2014-09-02 00:00:00", "latitude": 1.352083, "longitude": 103.819836}, "hash": "901e664197f3b54cd1dbb0f4f1ce909754de98e61dbe987a1b8c905f574225e8", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Tracy Shen\nDestination: Singapore\nCountry: N/A\nStart Date: 2014-09-02 00:00:00\nEnd Date: 2014-09-04 00:00:00\nDuration (days): N/A\nHighlights: The Marina Bay Sands\u2019 water and light show left a lasting impression. 
Coming back to Asia from Australia, I suddenly felt the shift from rural landscapes to a bustling metropolis.\nLatitude: 1.352083\nLongitude: 103.819836", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 377, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "40bff8fd-e5ea-434b-a2f5-546c77709e0a": {"__data__": {"id_": "40bff8fd-e5ea-434b-a2f5-546c77709e0a", "embedding": null, "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Melbourne", "start_date": "2014-08-07 00:00:00", "latitude": -37.8136, "longitude": 144.9631}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "90395f33-a9d7-4875-906d-b6f7a072ef28", "node_type": "4", "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Melbourne", "start_date": "2014-08-07 00:00:00", "latitude": -37.8136, "longitude": 144.9631}, "hash": "16be2cc4792bd3e71046fbb390b9014082c404206f889a2a2d4d8e3fb1e0c45a", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Tracy Shen\nDestination: Melbourne\nCountry: N/A\nStart Date: 2014-08-07 00:00:00\nEnd Date: 2014-09-01 00:00:00\nDuration (days): N/A\nHighlights: The strong presence of the Chinese community made the experience feel warm and welcoming. The koalas and kangaroos at the zoo were adorable! A road trip to Canberra and the Great Ocean Road was fantastic, and a two-week stay at a meditation center in the mountains provided a peaceful retreat. Being far from the city allowed for a breathtaking view of the Milky Way at night.\nLatitude: -37.8136\nLongitude: 144.9631", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 572, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "5f21fbc5-8459-4477-b89b-59b791f3d064": {"__data__": {"id_": "5f21fbc5-8459-4477-b89b-59b791f3d064", "embedding": null, "metadata": {"traveler_name": "Liu shuhui", "destination_city": "Georgia", "start_date": "2024-06-08 00:00:00", "latitude": 42.315407, "longitude": 43.35689199999999}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "af2b1766-a414-4f0f-a335-c25a10315f59", "node_type": "4", "metadata": {"traveler_name": "Liu shuhui", "destination_city": "Georgia", "start_date": "2024-06-08 00:00:00", "latitude": 42.315407, "longitude": 43.35689199999999}, "hash": "c16fe3eb809d42fd90583d39239df3fcfdd7116bbc3422f2853af95cf2cd822a", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Liu shuhui\nDestination: Georgia\nCountry: N/A\nStart Date: 2024-06-08 00:00:00\nEnd Date: 2024-06-15 00:00:00\nDuration (days): N/A\nHighlights: Very friendly budget for Ski in Gudauri area. 
\nLatitude: 42.315407\nLongitude: 43.35689199999999", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 250, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "57416b99-df61-4e5e-a8ac-5c3fb99aec5d": {"__data__": {"id_": "57416b99-df61-4e5e-a8ac-5c3fb99aec5d", "embedding": null, "metadata": {"traveler_name": "Jialong Xu", "destination_city": "Bogota", "start_date": "2025-06-01 00:00:00", "latitude": 4.710988599999999, "longitude": -74.072092}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "b199dc03-c2bd-4575-86a1-b11decafa20e", "node_type": "4", "metadata": {"traveler_name": "Jialong Xu", "destination_city": "Bogota", "start_date": "2025-06-01 00:00:00", "latitude": 4.710988599999999, "longitude": -74.072092}, "hash": "dc9ea0718152e1f6cbe46094952555046aa7ddb816f2fc934ae9a9eb8244f18b", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Jialong Xu\nDestination: Bogota\nCountry: N/A\nStart Date: 2025-06-01 00:00:00\nEnd Date: 2025-06-09 00:00:00\nDuration (days): N/A\nHighlights: Heavy police presence in certain areas indicates safety concerns. Stay aware of pickpockets in tourist zones and avoid isolated areas at night. The trip was great but required more street awareness than expected.\nLatitude: 4.710988599999999\nLongitude: -74.072092", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 416, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}, "af223824-d6e3-46a6-982a-01fa2a04b06f": {"__data__": {"id_": "af223824-d6e3-46a6-982a-01fa2a04b06f", "embedding": null, "metadata": {"traveler_name": "Alex", "destination_city": "Scotland, UK", "start_date": "2015-08-10 00:00:00", "latitude": 56.49067119999999, "longitude": -4.2026458}, "excluded_embed_metadata_keys": [], "excluded_llm_metadata_keys": [], "relationships": {"1": {"node_id": "26b4a98b-0615-4649-a977-c5c4b9b82f40", "node_type": "4", "metadata": {"traveler_name": "Alex", "destination_city": "Scotland, UK", "start_date": "2015-08-10 00:00:00", "latitude": 56.49067119999999, "longitude": -4.2026458}, "hash": "8691aba649be531580f27c86f69fdb0339a423d1059d236cb403f9ed289bab8c", "class_name": "RelatedNodeInfo"}}, "metadata_template": "{key}: {value}", "metadata_separator": "\n", "text": "Traveler Name: Alex\nDestination: Scotland, UK\nCountry: N/A\nStart Date: 2015-08-10 00:00:00\nEnd Date: 2015-08-12 00:00:00\nDuration (days): N/A\nHighlights: The journey itself\u2014ferries, coastal drives, and short hikes\u2014was part of the magic. I loved how peaceful and remote the islands felt, especially Iona, which had a quiet, almost spiritual atmosphere. 
If you\u2019re into nature, geology, or simply escaping the crowd, the west coast of Scotland is a treasure.\nLatitude: 56.49067119999999\nLongitude: -4.2026458", "mimetype": "text/plain", "start_char_idx": 0, "end_char_idx": 505, "metadata_seperator": "\n", "text_template": "{metadata_str}\n\n{content}", "class_name": "TextNode"}, "__type__": "1"}}, "docstore/ref_doc_info": {"bb101f42-b81c-4488-999c-d3cb26224653": {"node_ids": ["118833ff-b0d8-4628-a3a5-efaf41c3e90c"], "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Helsinki", "start_date": "2025-05-28 00:00:00", "latitude": 60.16985569999999, "longitude": 24.938379}}, "62ae5226-b04e-4c00-96e6-5f5ad48f69f3": {"node_ids": ["5ad3594b-22c9-474f-83ad-6d849cf2401b"], "metadata": {"traveler_name": "Hope", "destination_city": "Armsterdam", "start_date": "2025-02-01 00:00:00", "latitude": 52.3675734, "longitude": 4.9041389}}, "f5c370e9-06d5-43b0-8f91-fdcfb9e39445": {"node_ids": ["e1831c93-7292-485a-a70e-549f321b3b19"], "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Chongqing", "start_date": "2024-09-01 00:00:00", "latitude": 29.5656843, "longitude": 106.5511838}}, "9fcfe6c3-de7d-425c-9466-b3d38ed478d2": {"node_ids": ["f2106256-8633-4e75-920e-e547fa634d6f"], "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Luxor", "start_date": "2025-03-15 00:00:00", "latitude": 25.6872431, "longitude": 32.6396357}}, "b496d504-fc1b-45ee-aa60-16fc8a4b89f5": {"node_ids": ["5b2bfeda-fe49-47ea-8985-81284fde8f4e"], "metadata": {"traveler_name": "Hope", "destination_city": "Helsinki ", "start_date": "2025-05-30 00:00:00", "latitude": 60.16985569999999, "longitude": 24.938379}}, "2f305f8d-83c3-4edf-a6bd-0a23eaacdc8c": {"node_ids": ["d9e000b5-9ade-48a6-aece-3f8e4f02184f"], "metadata": {"traveler_name": "Jiao Chen", "destination_city": "Rovaniemi", "start_date": "2025-04-25 00:00:00", "latitude": 66.50394779999999, "longitude": 25.7293905}}, "247908d5-033f-4b93-a7b2-734d4b1ccaf9": {"node_ids": ["2383f256-33e0-46c7-a5c6-3dee50db45ce"], "metadata": {"traveler_name": "Hope", "destination_city": "Mangshi", "start_date": "2024-06-01 00:00:00", "latitude": 24.4337899, "longitude": 98.58815}}, "d54f1f7a-8f8b-4f1d-ace4-75425368fb78": {"node_ids": ["ff01ff0a-2f6d-4df0-b099-f94ebe16817d"], "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Tokyo", "start_date": "2024-06-06 00:00:00", "latitude": 35.6764225, "longitude": 139.650027}}, "d19b766a-3dc3-476b-966d-7653662afe4b": {"node_ids": ["a60bb298-8ce7-4aba-9eb9-545245567801"], "metadata": {"traveler_name": "Santeri Heino", "destination_city": "Riga", "start_date": "2024-08-12 00:00:00", "latitude": 56.9676941, "longitude": 24.1056221}}, "3b63eab1-6f0f-4c85-ba41-885cb6c5d253": {"node_ids": ["5f038e04-d26d-4f9e-b9c3-8c0d993d1769"], "metadata": {"traveler_name": "Min Khant", "destination_city": "Bangkok,Thailand", "start_date": "2024-02-21 00:00:00", "latitude": 13.7563309, "longitude": 100.5017651}}, "bd86c502-cf6d-4f8c-9402-2e03abdd8d3f": {"node_ids": ["65d5e162-c9d2-48af-9c3a-bed89ce949e9"], "metadata": {"traveler_name": "Matias", "destination_city": "Gran Canaria", "start_date": "2019-04-01 00:00:00", "latitude": 27.9202202, "longitude": -15.5474373}}, "d7589893-5529-493c-ab6b-5459fc44a698": {"node_ids": ["8af0a430-2a75-40ba-ae23-abe8d3535850"], "metadata": {"traveler_name": "Nikke", "destination_city": "Spain", "start_date": "2023-07-04 00:00:00", "latitude": 40.46366700000001, "longitude": -3.74922}}, "c61645a8-73f1-42b2-b2b1-bca2f82e1016": {"node_ids": 
["ffe06152-4993-44ee-8420-29ef8f12610e"], "metadata": {"traveler_name": "Hx", "destination_city": "Amsterdam", "start_date": "2024-06-05 00:00:00", "latitude": 52.3675734, "longitude": 4.9041389}}, "fed218ef-7453-4323-a215-259fc59a0f6f": {"node_ids": ["3db9558a-ffb7-45d5-9310-524b4b12952e"], "metadata": {"traveler_name": "Evelyn", "destination_city": "Brussels, Belgium", "start_date": "2025-04-01 00:00:00", "latitude": 50.503887, "longitude": 4.469936}}, "d1109c92-031e-47e2-9c1d-89f1d6717a60": {"node_ids": ["b4eacbd5-9d81-49c3-bf20-dbf0dc8f0a93"], "metadata": {"traveler_name": "Kyrie", "destination_city": "Italy", "start_date": "2025-05-02 00:00:00", "latitude": 41.87194, "longitude": 12.56738}}, "0f962f60-69f7-47e9-8a61-a032320629b2": {"node_ids": ["55ef06b3-8957-4ffa-b9e6-4cae377ef834"], "metadata": {"traveler_name": "Hope", "destination_city": "Oulu", "start_date": "2025-05-23 00:00:00", "latitude": 65.0120888, "longitude": 25.4650773}}, "97c79db6-e797-4741-aba7-d795ab508a97": {"node_ids": ["71ddf846-cbda-4d3d-b5a6-47d398474e83"], "metadata": {"traveler_name": "LIN", "destination_city": "Paris", "start_date": "2025-03-01 00:00:00", "latitude": 48.8575475, "longitude": 2.3513765}}, "8d53562c-f999-45a2-ae5f-9a7b295a80b6": {"node_ids": ["0c57b44b-7d53-49ab-8068-b7268698958d"], "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Antwerp", "start_date": "2024-11-16 00:00:00", "latitude": 51.2199302, "longitude": 4.414990299999999}}, "a47213d0-0611-4d26-912b-212b46e3f4e2": {"node_ids": ["1837793e-04e2-41b3-8379-df8d259b80a7"], "metadata": {"traveler_name": "Yushi Tan", "destination_city": "Stockholm", "start_date": "2014-09-05 00:00:00", "latitude": 59.3327036, "longitude": 18.0656255}}, "c3ca79b8-3aea-4364-8361-9b0b51951522": {"node_ids": ["d095ccee-dfd0-465e-a410-1630274d0bce"], "metadata": {"traveler_name": "Joi", "destination_city": "New York", "start_date": "2016-12-25 00:00:00", "latitude": 40.7127753, "longitude": -74.0059728}}, "ab8efc2a-e97b-4198-8abb-5f0ee915f412": {"node_ids": ["ed1e1cca-2d3f-4206-b115-ca5bd5f692af"], "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Singapore", "start_date": "2014-09-02 00:00:00", "latitude": 1.352083, "longitude": 103.819836}}, "90395f33-a9d7-4875-906d-b6f7a072ef28": {"node_ids": ["40bff8fd-e5ea-434b-a2f5-546c77709e0a"], "metadata": {"traveler_name": "Tracy Shen", "destination_city": "Melbourne", "start_date": "2014-08-07 00:00:00", "latitude": -37.8136, "longitude": 144.9631}}, "af2b1766-a414-4f0f-a335-c25a10315f59": {"node_ids": ["5f21fbc5-8459-4477-b89b-59b791f3d064"], "metadata": {"traveler_name": "Liu shuhui", "destination_city": "Georgia", "start_date": "2024-06-08 00:00:00", "latitude": 42.315407, "longitude": 43.35689199999999}}, "b199dc03-c2bd-4575-86a1-b11decafa20e": {"node_ids": ["57416b99-df61-4e5e-a8ac-5c3fb99aec5d"], "metadata": {"traveler_name": "Jialong Xu", "destination_city": "Bogota", "start_date": "2025-06-01 00:00:00", "latitude": 4.710988599999999, "longitude": -74.072092}}, "26b4a98b-0615-4649-a977-c5c4b9b82f40": {"node_ids": ["af223824-d6e3-46a6-982a-01fa2a04b06f"], "metadata": {"traveler_name": "Alex", "destination_city": "Scotland, UK", "start_date": "2015-08-10 00:00:00", "latitude": 56.49067119999999, "longitude": -4.2026458}}}}
travel_data_index/graph_store.json
ADDED
@@ -0,0 +1 @@
{"graph_dict": {}}
travel_data_index/image__vector_store.json
ADDED
@@ -0,0 +1 @@
{"embedding_dict": {}, "text_id_to_ref_doc_id": {}, "metadata_dict": {}}
travel_data_index/index_store.json
ADDED
@@ -0,0 +1 @@
{"index_store/data": {"a1801608-40e2-4160-bf3d-b832d49c9f3f": {"__type__": "vector_store", "__data__": "{\"index_id\": \"a1801608-40e2-4160-bf3d-b832d49c9f3f\", \"summary\": null, \"nodes_dict\": {\"118833ff-b0d8-4628-a3a5-efaf41c3e90c\": \"118833ff-b0d8-4628-a3a5-efaf41c3e90c\", \"5ad3594b-22c9-474f-83ad-6d849cf2401b\": \"5ad3594b-22c9-474f-83ad-6d849cf2401b\", \"e1831c93-7292-485a-a70e-549f321b3b19\": \"e1831c93-7292-485a-a70e-549f321b3b19\", \"f2106256-8633-4e75-920e-e547fa634d6f\": \"f2106256-8633-4e75-920e-e547fa634d6f\", \"5b2bfeda-fe49-47ea-8985-81284fde8f4e\": \"5b2bfeda-fe49-47ea-8985-81284fde8f4e\", \"d9e000b5-9ade-48a6-aece-3f8e4f02184f\": \"d9e000b5-9ade-48a6-aece-3f8e4f02184f\", \"2383f256-33e0-46c7-a5c6-3dee50db45ce\": \"2383f256-33e0-46c7-a5c6-3dee50db45ce\", \"ff01ff0a-2f6d-4df0-b099-f94ebe16817d\": \"ff01ff0a-2f6d-4df0-b099-f94ebe16817d\", \"a60bb298-8ce7-4aba-9eb9-545245567801\": \"a60bb298-8ce7-4aba-9eb9-545245567801\", \"5f038e04-d26d-4f9e-b9c3-8c0d993d1769\": \"5f038e04-d26d-4f9e-b9c3-8c0d993d1769\", \"65d5e162-c9d2-48af-9c3a-bed89ce949e9\": \"65d5e162-c9d2-48af-9c3a-bed89ce949e9\", \"8af0a430-2a75-40ba-ae23-abe8d3535850\": \"8af0a430-2a75-40ba-ae23-abe8d3535850\", \"ffe06152-4993-44ee-8420-29ef8f12610e\": \"ffe06152-4993-44ee-8420-29ef8f12610e\", \"3db9558a-ffb7-45d5-9310-524b4b12952e\": \"3db9558a-ffb7-45d5-9310-524b4b12952e\", \"b4eacbd5-9d81-49c3-bf20-dbf0dc8f0a93\": \"b4eacbd5-9d81-49c3-bf20-dbf0dc8f0a93\", \"55ef06b3-8957-4ffa-b9e6-4cae377ef834\": \"55ef06b3-8957-4ffa-b9e6-4cae377ef834\", \"71ddf846-cbda-4d3d-b5a6-47d398474e83\": \"71ddf846-cbda-4d3d-b5a6-47d398474e83\", \"0c57b44b-7d53-49ab-8068-b7268698958d\": \"0c57b44b-7d53-49ab-8068-b7268698958d\", \"1837793e-04e2-41b3-8379-df8d259b80a7\": \"1837793e-04e2-41b3-8379-df8d259b80a7\", \"d095ccee-dfd0-465e-a410-1630274d0bce\": \"d095ccee-dfd0-465e-a410-1630274d0bce\", \"ed1e1cca-2d3f-4206-b115-ca5bd5f692af\": \"ed1e1cca-2d3f-4206-b115-ca5bd5f692af\", \"40bff8fd-e5ea-434b-a2f5-546c77709e0a\": \"40bff8fd-e5ea-434b-a2f5-546c77709e0a\", \"5f21fbc5-8459-4477-b89b-59b791f3d064\": \"5f21fbc5-8459-4477-b89b-59b791f3d064\", \"57416b99-df61-4e5e-a8ac-5c3fb99aec5d\": \"57416b99-df61-4e5e-a8ac-5c3fb99aec5d\", \"af223824-d6e3-46a6-982a-01fa2a04b06f\": \"af223824-d6e3-46a6-982a-01fa2a04b06f\"}, \"doc_id_dict\": {}, \"embeddings_dict\": {}}"}}}
trip_utils.py
ADDED
@@ -0,0 +1,34 @@
def format_trip_summary(record: dict) -> str:
    name = record.get("name", "Someone")
    city = record.get("destinationName") or record.get("locationName")
    start = record.get("startDate")
    end = record.get("endDate")
    start_str = start.strftime("%Y-%m-%d") if start else ""
    end_str = end.strftime("%Y-%m-%d") if end else ""
    date_range = f"{start_str} to {end_str}"

    rating = record.get("rating")
    accommodation = record.get("accommodation")
    companion = record.get("companionType")
    budget = record.get("budgetStyle")
    highlights = record.get("highlights")
    food = record.get("memorableFood")
    impression = record.get("deepestImpressionSpot")
    tips = record.get("travelTips")
    itinerary = record.get("dailyBriefItinerary")
    tags = ", ".join(record.get("keywordTags", []))

    summary = f"**{name}'s Trip to {city}**\n"
    summary += f"Date: {date_range}\n"
    if rating: summary += f"⭐ Rating: {rating}/5\n"
    if accommodation: summary += f"🏨 Stayed at: {accommodation}\n"
    if companion: summary += f"🧑🤝🧑 With: {companion}\n"
    if budget: summary += f"💰 Budget Style: {budget}\n"
    if tags: summary += f"🏷️ Tags: {tags}\n"
    if highlights: summary += f"\n✨ Highlights:\n{highlights}\n"
    if food: summary += f"\n🍜 Memorable Food:\n{food}\n"
    if impression: summary += f"\n📍 Impression Spot:\n{impression}\n"
    if tips: summary += f"\n📝 Tips:\n{tips}\n"
    if itinerary: summary += f"\n📅 Itinerary:\n{itinerary}\n"

    return summary
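format_trip_summary expects datetime objects for startDate and endDate (it calls strftime on them) and simply skips any optional field that is missing. A quick usage sketch follows; the record values are made up for illustration, and the field names mirror those read by the function.

from datetime import datetime
from trip_utils import format_trip_summary

record = {
    "name": "Tracy Shen",
    "destinationName": "Helsinki",
    "startDate": datetime(2025, 5, 28),
    "endDate": datetime(2025, 5, 31),
    "rating": 4,  # illustrative value
    "keywordTags": ["seaside", "art"],  # illustrative values
    "highlights": "Market square and harbour walks.",  # illustrative value
}
print(format_trip_summary(record))
# Prints a Markdown-style block starting with "**Tracy Shen's Trip to Helsinki**",
# followed by the date range, rating, tags, and highlights.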
uv.lock
ADDED
The diff for this file is too large to render.
See raw diff