import os
from dotenv import load_dotenv
from openai import AsyncOpenAI
from openai.types.responses import ResponseTextDeltaEvent
from agents import Agent, Runner, OpenAIChatCompletionsModel
from agents.mcp import MCPServerSse

# Load environment variables (e.g. NEBIUS_API_KEY) from a .env file
load_dotenv()

# MCP tool URL (replace with actual URL you got from step 4.3)
SERVER_URL = "https://agents-mcp-hackathon-travel-planner-mcp.hf.space/gradio_api/mcp/sse"

instructions = """
You are an expert travel planner.
Given a user query, resolve it, if needed you will use the connected MCP tool to assist with travel planning tasks. 


Your tasks include:

1. Search for flight options using the tools from the mcp server,
2. Search for hotel options using the tools from the mcp server, keeping in mind the user's preferences.
3. Once you've decided the best options based on the user query, for each flight and hotel, include the booking link, for hotels you may include thumbnail image links.
4. Estimate the total budget (flight + hotel for the stay).
4. Suggest a list of fun activities at the destination based on your knowledge about the destination.
5. Format the entire report in Markdown with clear headings and bullet points.

any other suggestions which make the trip fun are also welcome.
"""

# Remote MCP server exposed by the Gradio Space over SSE
mcp_server = MCPServerSse({"url": SERVER_URL})

# Qwen3 served through Nebius AI Studio's OpenAI-compatible endpoint
model = OpenAIChatCompletionsModel(
    model="Qwen/Qwen3-235B-A22B",
    openai_client=AsyncOpenAI(base_url="https://api.studio.nebius.com/v1/",
                              api_key=os.getenv("NEBIUS_API_KEY")),
)
async def get_itinerary(user_query, history):
    # `history` is supplied by chat UIs such as Gradio's ChatInterface and is
    # not used directly here.
    try:
        # Open the SSE connection so the agent can discover the MCP tools
        await mcp_server.connect()
        agent = Agent(
            name="MCP Agent",
            instructions=instructions,
            mcp_servers=[mcp_server],
            model=model,
        )
        
        task = "You are a helpful assistant. Use the connected tool to assist with tasks."
        task += f"\n\nUser Query: {user_query}\n\n"

        think_buffer = ""
        response_buffer = ""
        showing_thought = False
        
        # Stream tokens as they arrive; each yield emits the updated partial
        # message, which a streaming chat UI can render in place.
        result = Runner.run_streamed(agent, task)
        async for event in result.stream_events():
            if event.type == "raw_response_event" and isinstance(event.data, ResponseTextDeltaEvent):
                token = event.data.delta

                # Qwen3 wraps its reasoning in <think>...</think> tags;
                # surface that reasoning separately from the final answer.
                if "<think>" in token:
                    showing_thought = True
                    think_buffer = ""
                    continue
                elif "</think>" in token:
                    showing_thought = False
                    yield f"🤔 *Thinking:* {think_buffer}\n"
                    continue

                if showing_thought:
                    think_buffer += token
                    if think_buffer:
                        yield f"🤔 *Thinking:* {think_buffer}"
                else:
                    response_buffer += token
                    if response_buffer:
                        yield f"{response_buffer}"

    finally:
        # Always close the SSE connection to the MCP server
        await mcp_server.cleanup()
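
# Usage sketch (assumption, not part of the original file): `get_itinerary`
# is an async generator with a (message, history) signature, which matches
# what Gradio's ChatInterface expects from a streaming chat function. A
# minimal way to serve it could look like the following; `gradio` is an
# assumed dependency and the title/description strings are illustrative.
if __name__ == "__main__":
    import gradio as gr

    demo = gr.ChatInterface(
        fn=get_itinerary,
        title="MCP Travel Planner",
        description="Ask for flights, hotels, and a trip itinerary.",
    )
    demo.launch()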