daniielyan commited on
Commit
4fd18a2
·
1 Parent(s): e84eb34

🔨 Implement core functionality for Job Search MCP Server, including user profile management, job search, cover letter generation, and Q&A response tools. Add configuration and service layers, and establish dependency management with uv. Introduce .gitignore and .python-version files for environment setup.

Browse files
.cursor/rules/uv-package-manager.mdc ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ description:
3
+ globs:
4
+ alwaysApply: true
5
+ ---
6
+ # Package Management with `uv`
7
+
8
+ These rules define strict guidelines for managing Python dependencies in this project using the `uv` dependency manager.
9
+
10
+ **✅ Use `uv` exclusively**
11
+
12
+ - All Python dependencies **must be installed, synchronized, and locked** using `uv`.
13
+ - Never use `pip`, `pip-tools`, or `poetry` directly for dependency management.
14
+
15
+ **πŸ” Managing Dependencies**
16
+
17
+ Always use these commands:
18
+
19
+ ```bash
20
+ # Add or upgrade dependencies
21
+ uv add <package>
22
+
23
+ # Remove dependencies
24
+ uv remove <package>
25
+
26
+ # Reinstall all dependencies from lock file
27
+ uv sync
28
+ ```
29
+
30
+ **πŸ” Scripts**
31
+
32
+ ```bash
33
+ # Run script with proper dependencies
34
+ uv run script.py
35
+ ```
36
+
37
+ You can edit inline-metadata manually:
38
+
39
+ ```python
40
+ # /// script
41
+ # requires-python = ">=3.12"
42
+ # dependencies = [
43
+ # "torch",
44
+ # "torchvision",
45
+ # "opencv-python",
46
+ # "numpy",
47
+ # "matplotlib",
48
+ # "Pillow",
49
+ # "timm",
50
+ # ]
51
+ # ///
52
+
53
+ print("some python code")
54
+ ```
55
+
56
+ Or using uv cli:
57
+
58
+ ```bash
59
+ # Add or upgrade script dependencies
60
+ uv add package-name --script script.py
61
+
62
+ # Remove script dependencies
63
+ uv remove package-name --script script.py
64
+
65
+ # Reinstall all script dependencies from lock file
66
+ uv sync --script script.py
67
+ ```
68
+
69
+
70
+
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ .venv
2
+ __pycache__
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.13
README.md CHANGED
@@ -1 +0,0 @@
1
- # jobsearch-mcp-server
 
 
app.py DELETED
@@ -1,230 +0,0 @@
1
- import gradio as gr
2
- import json
3
- from typing import Dict, List, Any
4
-
5
-
6
- class JobSearchMCP:
7
- """Job Search MCP Server - Smart job matching and application helper"""
8
-
9
- def __init__(self):
10
- self.user_profiles = {} # In-memory storage for demo purposes
11
-
12
- def profile_upsert(self, user_id: str, profile_data: str) -> Dict[str, Any]:
13
- """
14
- Stores/updates user résumé, skills, salary expectations, and career goals
15
-
16
- Args:
17
- user_id: Unique identifier for the user
18
- profile_data: JSON string containing user profile information
19
-
20
- Returns:
21
- Dict with success status and message
22
- """
23
- try:
24
- # TODO: Implement profile storage logic
25
- # - Parse profile_data JSON
26
- # - Validate required fields (resume, skills, salary_wish, career_goals)
27
- # - Store in database or persistent storage
28
- # - Return success/error response
29
-
30
- return {
31
- "success": True,
32
- "message": "Profile updated successfully",
33
- "user_id": user_id,
34
- }
35
- except Exception as e:
36
- return {"success": False, "message": f"Error updating profile: {str(e)}"}
37
-
38
- def jobs_search(
39
- self, user_id: str, query: str = "", location: str = "", job_type: str = ""
40
- ) -> Dict[str, Any]:
41
- """
42
- Pulls fresh job posts, ranks them with GPU embeddings, and returns fit scores
43
-
44
- Args:
45
- user_id: User identifier to get personalized results
46
- query: Job search query/keywords
47
- location: Preferred job location
48
- job_type: Type of job (full-time, contract, remote, etc.)
49
-
50
- Returns:
51
- Dict with ranked job listings and fit scores
52
- """
53
- try:
54
- # TODO: Implement job search logic
55
- # - Fetch jobs from various APIs (LinkedIn, Indeed, etc.)
56
- # - Use GPU embeddings to calculate job-profile fit scores
57
- # - Rank jobs by relevance and fit score
58
- # - Return top matches with metadata
59
-
60
- return {
61
- "success": True,
62
- "jobs": [],
63
- "total_found": 0,
64
- "search_params": {
65
- "query": query,
66
- "location": location,
67
- "job_type": job_type,
68
- },
69
- }
70
- except Exception as e:
71
- return {"success": False, "message": f"Error searching jobs: {str(e)}"}
72
-
73
- def letter_generate(
74
- self, user_id: str, job_description: str, tone: str = "professional"
75
- ) -> Dict[str, Any]:
76
- """
77
- Generates personalized cover letters using LLM
78
-
79
- Args:
80
- user_id: User identifier to access profile
81
- job_description: The job posting description
82
- tone: Tone of the cover letter (professional, casual, enthusiastic, etc.)
83
-
84
- Returns:
85
- Dict with generated cover letter
86
- """
87
- try:
88
- # TODO: Implement cover letter generation
89
- # - Retrieve user profile
90
- # - Use LLM to generate personalized cover letter
91
- # - Match user skills with job requirements
92
- # - Apply specified tone
93
- # - Return formatted cover letter
94
-
95
- return {
96
- "success": True,
97
- "cover_letter": "",
98
- "tone_used": tone,
99
- "word_count": 0,
100
- }
101
- except Exception as e:
102
- return {
103
- "success": False,
104
- "message": f"Error generating cover letter: {str(e)}",
105
- }
106
-
107
- def qa_reply(
108
- self, user_id: str, question: str, context: str = ""
109
- ) -> Dict[str, Any]:
110
- """
111
- Drafts concise answers to client questions
112
-
113
- Args:
114
- user_id: User identifier to access profile
115
- question: The question from potential employer/client
116
- context: Additional context about the conversation
117
-
118
- Returns:
119
- Dict with generated response
120
- """
121
- try:
122
- # TODO: Implement Q&A response generation
123
- # - Retrieve user profile and experience
124
- # - Generate contextual response using LLM
125
- # - Keep response concise and professional
126
- # - Tailor to user's background and skills
127
-
128
- return {"success": True, "response": "", "confidence_score": 0.0}
129
- except Exception as e:
130
- return {"success": False, "message": f"Error generating response: {str(e)}"}
131
-
132
-
133
- # Initialize the MCP server
134
- mcp_server = JobSearchMCP()
135
-
136
- # Create Gradio interface for each endpoint
137
- with gr.Blocks(title="Job Search MCP Server") as demo:
138
- gr.Markdown("# πŸ” Job Search MCP Server")
139
- gr.Markdown("Smart job matching and instant application helper")
140
-
141
- with gr.Tab("Profile Management"):
142
- gr.Markdown("### Store and update your professional profile")
143
- with gr.Row():
144
- profile_user_id = gr.Textbox(
145
- label="User ID", placeholder="Enter your unique user ID"
146
- )
147
- profile_data = gr.TextArea(
148
- label="Profile Data (JSON)",
149
- placeholder='{"resume": "...", "skills": [...], "salary_wish": "...", "career_goals": "..."}',
150
- lines=5,
151
- )
152
- profile_submit = gr.Button("Update Profile", variant="primary")
153
- profile_output = gr.JSON(label="Response")
154
-
155
- profile_submit.click(
156
- fn=mcp_server.profile_upsert,
157
- inputs=[profile_user_id, profile_data],
158
- outputs=profile_output,
159
- )
160
-
161
- with gr.Tab("Job Search"):
162
- gr.Markdown("### Find and rank relevant job opportunities")
163
- with gr.Row():
164
- search_user_id = gr.Textbox(label="User ID")
165
- search_query = gr.Textbox(
166
- label="Search Query", placeholder="e.g., Python developer"
167
- )
168
- with gr.Row():
169
- search_location = gr.Textbox(
170
- label="Location", placeholder="e.g., Remote, New York"
171
- )
172
- search_job_type = gr.Dropdown(
173
- label="Job Type",
174
- choices=["full-time", "part-time", "contract", "freelance", "remote"],
175
- value="full-time",
176
- )
177
- search_submit = gr.Button("Search Jobs", variant="primary")
178
- search_output = gr.JSON(label="Job Results")
179
-
180
- search_submit.click(
181
- fn=mcp_server.jobs_search,
182
- inputs=[search_user_id, search_query, search_location, search_job_type],
183
- outputs=search_output,
184
- )
185
-
186
- with gr.Tab("Cover Letter Generator"):
187
- gr.Markdown("### Generate personalized cover letters")
188
- with gr.Row():
189
- letter_user_id = gr.Textbox(label="User ID")
190
- letter_tone = gr.Dropdown(
191
- label="Tone",
192
- choices=["professional", "casual", "enthusiastic", "formal"],
193
- value="professional",
194
- )
195
- letter_job_desc = gr.TextArea(
196
- label="Job Description",
197
- placeholder="Paste the job description here...",
198
- lines=5,
199
- )
200
- letter_submit = gr.Button("Generate Cover Letter", variant="primary")
201
- letter_output = gr.JSON(label="Generated Letter")
202
-
203
- letter_submit.click(
204
- fn=mcp_server.letter_generate,
205
- inputs=[letter_user_id, letter_job_desc, letter_tone],
206
- outputs=letter_output,
207
- )
208
-
209
- with gr.Tab("Q&A Assistant"):
210
- gr.Markdown("### Get help with interview questions and client responses")
211
- with gr.Row():
212
- qa_user_id = gr.Textbox(label="User ID")
213
- qa_context = gr.Textbox(
214
- label="Context (optional)", placeholder="Additional context..."
215
- )
216
- qa_question = gr.TextArea(
217
- label="Question", placeholder="e.g., Why should we hire you?", lines=3
218
- )
219
- qa_submit = gr.Button("Generate Response", variant="primary")
220
- qa_output = gr.JSON(label="Generated Response")
221
-
222
- qa_submit.click(
223
- fn=mcp_server.qa_reply,
224
- inputs=[qa_user_id, qa_question, qa_context],
225
- outputs=qa_output,
226
- )
227
-
228
- if __name__ == "__main__":
229
- # Enable MCP server functionality
230
- demo.launch(enable_mcp=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
config.py DELETED
@@ -1,62 +0,0 @@
1
- import os
2
- from dotenv import load_dotenv
3
-
4
- # Load environment variables from .env file
5
- load_dotenv()
6
-
7
-
8
- class Config:
9
- """Configuration settings for Job Search MCP Server"""
10
-
11
- # API Keys
12
- OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
13
- ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
14
- HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
15
-
16
- # Job Search APIs
17
- LINKEDIN_API_KEY = os.getenv("LINKEDIN_API_KEY")
18
- INDEED_API_KEY = os.getenv("INDEED_API_KEY")
19
- GLASSDOOR_API_KEY = os.getenv("GLASSDOOR_API_KEY")
20
-
21
- # Database Settings (for future implementation)
22
- DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///jobsearch.db")
23
-
24
- # Embedding Model Settings
25
- EMBEDDING_MODEL = os.getenv(
26
- "EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2"
27
- )
28
- EMBEDDING_CACHE_SIZE = int(os.getenv("EMBEDDING_CACHE_SIZE", "1000"))
29
-
30
- # LLM Settings
31
- DEFAULT_LLM_MODEL = os.getenv("DEFAULT_LLM_MODEL", "gpt-3.5-turbo")
32
- MAX_TOKENS = int(os.getenv("MAX_TOKENS", "1000"))
33
- TEMPERATURE = float(os.getenv("TEMPERATURE", "0.7"))
34
-
35
- # Job Search Settings
36
- MAX_JOBS_PER_SEARCH = int(os.getenv("MAX_JOBS_PER_SEARCH", "50"))
37
- MIN_MATCH_SCORE = float(os.getenv("MIN_MATCH_SCORE", "0.6"))
38
-
39
- # Cover Letter Settings
40
- MAX_COVER_LETTER_WORDS = int(os.getenv("MAX_COVER_LETTER_WORDS", "300"))
41
-
42
- # Server Settings
43
- SERVER_HOST = os.getenv("SERVER_HOST", "127.0.0.1")
44
- SERVER_PORT = int(os.getenv("SERVER_PORT", "7860"))
45
- DEBUG = os.getenv("DEBUG", "False").lower() == "true"
46
-
47
- @classmethod
48
- def validate_config(cls):
49
- """Validate that required configuration is present"""
50
- required_keys = ["OPENAI_API_KEY", "ANTHROPIC_API_KEY"]
51
-
52
- missing_keys = []
53
- for key in required_keys:
54
- if not getattr(cls, key):
55
- missing_keys.append(key)
56
-
57
- if missing_keys:
58
- raise ValueError(
59
- f"Missing required environment variables: {', '.join(missing_keys)}"
60
- )
61
-
62
- return True
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
main.py ADDED
@@ -0,0 +1,430 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Job Search MCP Server - Smart job matching and instant application helper
4
+
5
+ Main entry point for the MCP server that exposes the four core endpoints:
6
+ 1. profile.upsert - Store user résumé, skills, salary wish, and career goals
7
+ 2. jobs.search - Pull fresh job posts, rank with GPU embeddings, return fit scores
8
+ 3. letter.generate - Create personalized cover letters using LLM
9
+ 4. qa.reply - Draft concise answers to client questions
10
+
11
+ This server uses both GPU processing for embeddings and LLM APIs for text generation,
12
+ demonstrating efficient use of both credit pools.
13
+ """
14
+
15
+ import asyncio
16
+ import gradio as gr
17
+ from typing import Dict, Any
18
+
19
+ from src.tools import ProfileTool, JobSearchTool, CoverLetterTool, QATool
20
+ from src.config import get_settings
21
+
22
+
23
+ class JobSearchMCPServer:
24
+ """Main MCP server class integrating all job search tools."""
25
+
26
+ def __init__(self):
27
+ self.settings = get_settings()
28
+
29
+ # Initialize all tools
30
+ self.profile_tool = ProfileTool()
31
+ self.job_search_tool = JobSearchTool()
32
+ self.cover_letter_tool = CoverLetterTool()
33
+ self.qa_tool = QATool()
34
+
35
+ print(f"🚀 Job Search MCP Server initialized")
36
+ print(f"📊 GPU Embeddings: {self.settings.embedding_model}")
37
+ print(f"🤖 LLM Provider: {self.settings.llm_provider}")
38
+
39
+ # Core MCP Endpoints
40
+
41
+ def profile_upsert(self, user_id: str, profile_data: str) -> Dict[str, Any]:
42
+ """
43
+ MCP Endpoint: profile.upsert
44
+
45
+ Stores the user résumé, skills, salary expectations, and career goals.
46
+ Keeps personal context so every later call is tailored.
47
+ """
48
+ return self.profile_tool.upsert(user_id, profile_data)
49
+
50
+ def jobs_search(
51
+ self, user_id: str, query: str = "", location: str = "", job_type: str = ""
52
+ ) -> Dict[str, Any]:
53
+ """
54
+ MCP Endpoint: jobs.search
55
+
56
+ Pulls fresh job posts, ranks them with GPU embeddings, and returns a fit score.
57
+ Users see the most relevant roles first, no endless scrolling.
58
+ """
59
+ return self.job_search_tool.search(user_id, query, location, job_type)
60
+
61
+ def letter_generate(
62
+ self, user_id: str, job_description: str, tone: str = "professional"
63
+ ) -> Dict[str, Any]:
64
+ """
65
+ MCP Endpoint: letter.generate
66
+
67
+ Calls an LLM to create a short, personalized cover letter in any tone.
68
+ Saves time and improves response quality.
69
+ """
70
+ return self.cover_letter_tool.generate(user_id, job_description, tone)
71
+
72
+ def qa_reply(
73
+ self, user_id: str, question: str, context: str = ""
74
+ ) -> Dict[str, Any]:
75
+ """
76
+ MCP Endpoint: qa.reply
77
+
78
+ Drafts concise answers to client questions like "Why should we hire you?"
79
+ Speeds up Upwork, Fiverr, or LinkedIn chats.
80
+ """
81
+ return self.qa_tool.reply(user_id, question, context)
82
+
83
+ # Additional Helper Endpoints
84
+
85
+ def get_server_stats(self) -> Dict[str, Any]:
86
+ """Get server statistics and health information."""
87
+ try:
88
+ from src.services import EmbeddingService
89
+
90
+ embedding_service = EmbeddingService()
91
+ embed_stats = embedding_service.get_index_stats()
92
+
93
+ return {
94
+ "success": True,
95
+ "server_info": {
96
+ "app_name": self.settings.app_name,
97
+ "embedding_model": self.settings.embedding_model,
98
+ "llm_provider": self.settings.llm_provider,
99
+ "llm_model": self.settings.llm_model,
100
+ },
101
+ "embedding_stats": embed_stats,
102
+ "endpoints": [
103
+ "profile.upsert",
104
+ "jobs.search",
105
+ "letter.generate",
106
+ "qa.reply",
107
+ ],
108
+ }
109
+ except Exception as e:
110
+ return {
111
+ "success": False,
112
+ "message": f"Error getting server stats: {str(e)}",
113
+ }
114
+
115
+
116
+ # Initialize the MCP server
117
+ mcp_server = JobSearchMCPServer()
118
+
119
+
120
+ # Create Gradio interface for easy testing and demonstration
121
+ def create_gradio_interface():
122
+ """Create a Gradio interface for the MCP server."""
123
+
124
+ with gr.Blocks(
125
+ title="Job Search MCP Server",
126
+ theme=gr.themes.Soft(),
127
+ css="""
128
+ .gradio-container {
129
+ max-width: 1200px !important;
130
+ }
131
+ .main-header {
132
+ text-align: center;
133
+ background: linear-gradient(45deg, #667eea 0%, #764ba2 100%);
134
+ color: white;
135
+ padding: 20px;
136
+ border-radius: 10px;
137
+ margin-bottom: 20px;
138
+ }
139
+ """,
140
+ ) as demo:
141
+ # Header
142
+ gr.HTML("""
143
+ <div class="main-header">
144
+ <h1>🔍 Job Search MCP Server</h1>
145
+ <p>Smart job matching and instant application helper</p>
146
+ <p><strong>4 Core Endpoints:</strong> profile.upsert | jobs.search | letter.generate | qa.reply</p>
147
+ </div>
148
+ """)
149
+
150
+ # Server Stats
151
+ with gr.Row():
152
+ with gr.Column():
153
+ stats_button = gr.Button("📊 Get Server Stats", variant="secondary")
154
+ stats_output = gr.JSON(label="Server Statistics")
155
+
156
+ stats_button.click(fn=mcp_server.get_server_stats, outputs=stats_output)
157
+
158
+ # Main endpoints in tabs
159
+ with gr.Tabs():
160
+ # Profile Management Tab
161
+ with gr.Tab("πŸ‘€ Profile Management (profile.upsert)"):
162
+ gr.Markdown("### Store and update your professional profile")
163
+ gr.Markdown(
164
+ "*This endpoint keeps personal context so every later call is tailored*"
165
+ )
166
+
167
+ with gr.Row():
168
+ with gr.Column():
169
+ profile_user_id = gr.Textbox(
170
+ label="User ID",
171
+ placeholder="Enter your unique user ID (e.g., john_doe_2024)",
172
+ value="demo_user",
173
+ )
174
+ profile_data = gr.TextArea(
175
+ label="Profile Data (JSON)",
176
+ placeholder='{\n "resume": "Full resume text here...",\n "skills": ["Python", "JavaScript", "React", "Node.js"],\n "salary_wish": "$80,000 - $120,000 annually",\n "career_goals": "Looking to transition into a senior full-stack developer role at a tech company",\n "experience_level": "Mid-level",\n "location": "Remote",\n "education": "BS Computer Science"\n}',
177
+ lines=8,
178
+ )
179
+ profile_submit = gr.Button(
180
+ "πŸ’Ύ Update Profile", variant="primary"
181
+ )
182
+
183
+ with gr.Column():
184
+ profile_output = gr.JSON(label="Response")
185
+
186
+ # Quick profile actions
187
+ with gr.Row():
188
+ get_profile_btn = gr.Button(
189
+ "πŸ‘οΈ View Profile", variant="secondary"
190
+ )
191
+ delete_profile_btn = gr.Button(
192
+ "πŸ—‘οΈ Delete Profile", variant="secondary"
193
+ )
194
+
195
+ profile_submit.click(
196
+ fn=mcp_server.profile_upsert,
197
+ inputs=[profile_user_id, profile_data],
198
+ outputs=profile_output,
199
+ )
200
+
201
+ get_profile_btn.click(
202
+ fn=mcp_server.profile_tool.get,
203
+ inputs=[profile_user_id],
204
+ outputs=profile_output,
205
+ )
206
+
207
+ delete_profile_btn.click(
208
+ fn=mcp_server.profile_tool.delete,
209
+ inputs=[profile_user_id],
210
+ outputs=profile_output,
211
+ )
212
+
213
+ # Job Search Tab
214
+ with gr.Tab("πŸ” Job Search (jobs.search)"):
215
+ gr.Markdown(
216
+ "### Find and rank relevant job opportunities with GPU embeddings"
217
+ )
218
+ gr.Markdown(
219
+ "*Pulls fresh job posts, ranks them with GPU embeddings, and returns fit scores*"
220
+ )
221
+
222
+ with gr.Row():
223
+ with gr.Column():
224
+ search_user_id = gr.Textbox(label="User ID", value="demo_user")
225
+ search_query = gr.Textbox(
226
+ label="Search Query",
227
+ placeholder="e.g., Python developer, Data scientist, Frontend engineer",
228
+ )
229
+
230
+ with gr.Row():
231
+ search_location = gr.Textbox(
232
+ label="Location",
233
+ placeholder="e.g., Remote, New York, San Francisco",
234
+ )
235
+ search_job_type = gr.Dropdown(
236
+ label="Job Type",
237
+ choices=[
238
+ "full-time",
239
+ "part-time",
240
+ "contract",
241
+ "freelance",
242
+ "remote",
243
+ ],
244
+ value="full-time",
245
+ )
246
+
247
+ search_submit = gr.Button("πŸ” Search Jobs", variant="primary")
248
+
249
+ with gr.Column():
250
+ search_output = gr.JSON(label="Job Results with Fit Scores")
251
+
252
+ # Additional job search features
253
+ suggestions_btn = gr.Button(
254
+ "πŸ’‘ Get Search Suggestions", variant="secondary"
255
+ )
256
+ clear_cache_btn = gr.Button(
257
+ "πŸ—‘οΈ Clear Job Cache", variant="secondary"
258
+ )
259
+
260
+ search_submit.click(
261
+ fn=mcp_server.jobs_search,
262
+ inputs=[
263
+ search_user_id,
264
+ search_query,
265
+ search_location,
266
+ search_job_type,
267
+ ],
268
+ outputs=search_output,
269
+ )
270
+
271
+ suggestions_btn.click(
272
+ fn=mcp_server.job_search_tool.get_search_suggestions,
273
+ inputs=[search_user_id],
274
+ outputs=search_output,
275
+ )
276
+
277
+ clear_cache_btn.click(
278
+ fn=mcp_server.job_search_tool.clear_job_cache, outputs=search_output
279
+ )
280
+
281
+ # Cover Letter Generator Tab
282
+ with gr.Tab("πŸ“ Cover Letter (letter.generate)"):
283
+ gr.Markdown("### Generate personalized cover letters using LLM")
284
+ gr.Markdown(
285
+ "*Creates short, personalized cover letters in any tone - saves time and improves quality*"
286
+ )
287
+
288
+ with gr.Row():
289
+ with gr.Column():
290
+ letter_user_id = gr.Textbox(label="User ID", value="demo_user")
291
+ letter_tone = gr.Dropdown(
292
+ label="Tone",
293
+ choices=[
294
+ "professional",
295
+ "casual",
296
+ "enthusiastic",
297
+ "formal",
298
+ ],
299
+ value="professional",
300
+ )
301
+ letter_job_desc = gr.TextArea(
302
+ label="Job Description",
303
+ placeholder="Paste the complete job description here...",
304
+ lines=6,
305
+ )
306
+ letter_submit = gr.Button(
307
+ "πŸ“ Generate Cover Letter", variant="primary"
308
+ )
309
+
310
+ with gr.Column():
311
+ letter_output = gr.JSON(label="Generated Cover Letter")
312
+
313
+ # Additional cover letter features
314
+ multiple_tones_btn = gr.Button(
315
+ "🎭 Generate Multiple Tones", variant="secondary"
316
+ )
317
+ template_btn = gr.Button("πŸ“‹ Get Template", variant="secondary")
318
+
319
+ letter_submit.click(
320
+ fn=mcp_server.letter_generate,
321
+ inputs=[letter_user_id, letter_job_desc, letter_tone],
322
+ outputs=letter_output,
323
+ )
324
+
325
+ multiple_tones_btn.click(
326
+ fn=mcp_server.cover_letter_tool.generate_multiple_tones,
327
+ inputs=[letter_user_id, letter_job_desc],
328
+ outputs=letter_output,
329
+ )
330
+
331
+ template_btn.click(
332
+ fn=mcp_server.cover_letter_tool.get_cover_letter_template,
333
+ inputs=[letter_tone],
334
+ outputs=letter_output,
335
+ )
336
+
337
+ # Q&A Assistant Tab
338
+ with gr.Tab("πŸ’¬ Q&A Assistant (qa.reply)"):
339
+ gr.Markdown(
340
+ "### Get help with interview questions and client responses"
341
+ )
342
+ gr.Markdown(
343
+ "*Drafts concise answers to speed up Upwork, Fiverr, or LinkedIn chats*"
344
+ )
345
+
346
+ with gr.Row():
347
+ with gr.Column():
348
+ qa_user_id = gr.Textbox(label="User ID", value="demo_user")
349
+ qa_question = gr.TextArea(
350
+ label="Question",
351
+ placeholder="e.g., Why should we hire you?\nWhat's your experience with Python?\nHow much do you charge for this project?",
352
+ lines=4,
353
+ )
354
+ qa_context = gr.Textbox(
355
+ label="Context (optional)",
356
+ placeholder="Additional context about the role or conversation...",
357
+ )
358
+ qa_submit = gr.Button("πŸ’¬ Generate Response", variant="primary")
359
+
360
+ with gr.Column():
361
+ qa_output = gr.JSON(label="Generated Response")
362
+
363
+ # Additional Q&A features
364
+ with gr.Row():
365
+ common_questions_btn = gr.Button(
366
+ "❓ Common Questions", variant="secondary"
367
+ )
368
+ practice_session_btn = gr.Button(
369
+ "🎯 Practice Session", variant="secondary"
370
+ )
371
+
372
+ qa_submit.click(
373
+ fn=mcp_server.qa_reply,
374
+ inputs=[qa_user_id, qa_question, qa_context],
375
+ outputs=qa_output,
376
+ )
377
+
378
+ common_questions_btn.click(
379
+ fn=lambda: mcp_server.qa_tool.get_common_questions("developer"),
380
+ outputs=qa_output,
381
+ )
382
+
383
+ practice_session_btn.click(
384
+ fn=lambda uid: mcp_server.qa_tool.practice_session(
385
+ uid, "developer", 3
386
+ ),
387
+ inputs=[qa_user_id],
388
+ outputs=qa_output,
389
+ )
390
+
391
+ # Footer with usage information
392
+ gr.HTML("""
393
+ <div style="margin-top: 40px; padding: 20px; background-color: #f0f0f0; border-radius: 10px;">
394
+ <h3>🎯 How It Works</h3>
395
+ <p><strong>GPU Part (T4-small):</strong> The server embeds user profile text and each job post with a modern sentence-embedding model. A FAISS index runs similarity search in real time.</p>
396
+ <p><strong>Inference-API Part:</strong> A hosted LLM writes cover letters and Q&A replies. Average call is under 300 tokens.</p>
397
+ <p><strong>Typical User Flow:</strong></p>
398
+ <ol>
399
+ <li>Upload résumé and skills once using <code>profile.upsert</code></li>
400
+ <li>Call <code>jobs.search</code> with a role keyword (e.g., "LLM engineer")</li>
401
+ <li>Get a ranked list of matches with fit percentages</li>
402
+ <li>Pick a job ID and call <code>letter.generate</code> to copy a ready cover letter</li>
403
+ <li>When the recruiter asks something, send the question to <code>qa.reply</code> for an instant answer</li>
404
+ </ol>
405
+ <p><strong>Benefits:</strong> Cuts application time by 80%+, reduces copy-pasted cover letters, improves job-to-skill matching</p>
406
+ </div>
407
+ """)
408
+
409
+ return demo
410
+
411
+
412
+ def main():
413
+ """Main entry point for the application."""
414
+ print("🚀 Starting Job Search MCP Server...")
415
+
416
+ # Create and launch Gradio interface
417
+ demo = create_gradio_interface()
418
+
419
+ # Launch with MCP enabled
420
+ demo.launch(
421
+ server_name=mcp_server.settings.host,
422
+ server_port=mcp_server.settings.port,
423
+ enable_mcp=True,
424
+ share=False,
425
+ show_error=True,
426
+ )
427
+
428
+
429
+ if __name__ == "__main__":
430
+ main()
pyproject.toml ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "jobsearch-mcp-server"
3
+ version = "0.1.0"
4
+ description = "Smart job matching and instant application helper MCP server"
5
+ readme = "README.md"
6
+ requires-python = ">=3.10"
7
+ dependencies = [
8
+ "gradio[mcp]>=5.0.0",
9
+ "openai>=1.0.0",
10
+ "anthropic>=0.30.0",
11
+ "requests>=2.31.0",
12
+ "python-dotenv>=1.0.0",
13
+ "numpy>=1.24.0",
14
+ "sentence-transformers>=2.2.0",
15
+ "scikit-learn>=1.3.0",
16
+ "pandas>=2.0.0",
17
+ "beautifulsoup4>=4.12.0",
18
+ "lxml>=4.9.0",
19
+ "httpx>=0.24.0",
20
+ "pydantic>=2.0.0",
21
+ "python-multipart>=0.0.6",
22
+ "faiss-cpu>=1.7.0",
23
+ "torch>=2.0.0",
24
+ "transformers>=4.30.0",
25
+ "datasets>=2.14.0",
26
+ "aiohttp>=3.8.0",
27
+ "typing-extensions>=4.5.0"
28
+ ]
29
+
30
+ [project.optional-dependencies]
31
+ dev = [
32
+ "pytest>=7.0.0",
33
+ "black>=23.0.0",
34
+ "isort>=5.12.0",
35
+ "flake8>=6.0.0",
36
+ "mypy>=1.0.0"
37
+ ]
38
+
39
+ [build-system]
40
+ requires = ["hatchling"]
41
+ build-backend = "hatchling.build"
42
+
43
+ [tool.hatch.build.targets.wheel]
44
+ packages = ["src"]
45
+
46
+ [tool.black]
47
+ line-length = 88
48
+ target-version = ['py310']
49
+
50
+ [tool.isort]
51
+ profile = "black"
src/config/__init__.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ """Configuration module for Job Search MCP Server."""
2
+
3
+ from .settings import Settings, get_settings
4
+
5
+ __all__ = ["Settings", "get_settings"]
src/config/settings.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Settings and configuration for the Job Search MCP Server."""
2
+
3
+ import os
4
+ from functools import lru_cache
5
+ from typing import Optional
6
+
7
+ from pydantic import BaseSettings, Field
8
+
9
+
10
+ class Settings(BaseSettings):
11
+ """Application settings and configuration."""
12
+
13
+ # API Keys
14
+ openai_api_key: Optional[str] = Field(default=None, env="OPENAI_API_KEY")
15
+ anthropic_api_key: Optional[str] = Field(default=None, env="ANTHROPIC_API_KEY")
16
+
17
+ # Job Search APIs
18
+ linkedin_api_key: Optional[str] = Field(default=None, env="LINKEDIN_API_KEY")
19
+ indeed_api_key: Optional[str] = Field(default=None, env="INDEED_API_KEY")
20
+
21
+ # Embedding Model Settings
22
+ embedding_model: str = Field(default="all-MiniLM-L6-v2", env="EMBEDDING_MODEL")
23
+ embedding_dimension: int = Field(default=384, env="EMBEDDING_DIMENSION")
24
+
25
+ # LLM Settings
26
+ llm_provider: str = Field(default="openai", env="LLM_PROVIDER") # openai, anthropic
27
+ llm_model: str = Field(default="gpt-3.5-turbo", env="LLM_MODEL")
28
+ max_tokens: int = Field(default=300, env="MAX_TOKENS")
29
+ temperature: float = Field(default=0.7, env="TEMPERATURE")
30
+
31
+ # Application Settings
32
+ app_name: str = Field(default="Job Search MCP Server", env="APP_NAME")
33
+ debug: bool = Field(default=False, env="DEBUG")
34
+ host: str = Field(default="127.0.0.1", env="HOST")
35
+ port: int = Field(default=7860, env="PORT")
36
+
37
+ # Storage Settings
38
+ profiles_db_path: str = Field(
39
+ default="./data/profiles.json", env="PROFILES_DB_PATH"
40
+ )
41
+ jobs_cache_path: str = Field(
42
+ default="./data/jobs_cache.json", env="JOBS_CACHE_PATH"
43
+ )
44
+ embeddings_cache_path: str = Field(
45
+ default="./data/embeddings.faiss", env="EMBEDDINGS_CACHE_PATH"
46
+ )
47
+
48
+ # Search Settings
49
+ max_jobs_per_search: int = Field(default=50, env="MAX_JOBS_PER_SEARCH")
50
+ similarity_threshold: float = Field(default=0.7, env="SIMILARITY_THRESHOLD")
51
+
52
+ class Config:
53
+ """Pydantic config."""
54
+
55
+ env_file = ".env"
56
+ env_file_encoding = "utf-8"
57
+
58
+
59
+ @lru_cache()
60
+ def get_settings() -> Settings:
61
+ """Get cached settings instance."""
62
+ return Settings()
src/services/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ """Services module for Job Search MCP Server."""
2
+
3
+ from .embedding_service import EmbeddingService
4
+ from .llm_service import LLMService
5
+ from .job_search_service import JobSearchService
6
+ from .profile_service import ProfileService
7
+
8
+ __all__ = ["EmbeddingService", "LLMService", "JobSearchService", "ProfileService"]
src/services/embedding_service.py ADDED
@@ -0,0 +1,206 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Embedding service for text embeddings and similarity search using GPU."""
2
+
3
+ import json
4
+ import os
5
+ from typing import Any, Dict, List, Optional, Tuple
6
+
7
+ import faiss
8
+ import numpy as np
9
+ from sentence_transformers import SentenceTransformer
10
+
11
+ from ..config import get_settings
12
+
13
+
14
+ class EmbeddingService:
15
+ """Service for text embeddings and similarity search."""
16
+
17
+ def __init__(self):
18
+ self.settings = get_settings()
19
+ self.model = None
20
+ self.index = None
21
+ self.job_metadata = {}
22
+ self._load_model()
23
+ self._load_index()
24
+
25
+ def _load_model(self):
26
+ """Load the sentence transformer model."""
27
+ try:
28
+ self.model = SentenceTransformer(self.settings.embedding_model)
29
+ # Use GPU if available
30
+ if hasattr(self.model, "to"):
31
+ import torch
32
+
33
+ device = "cuda" if torch.cuda.is_available() else "cpu"
34
+ self.model = self.model.to(device)
35
+ print(f"Embedding model loaded on: {device}")
36
+ except Exception as e:
37
+ print(f"Error loading embedding model: {e}")
38
+ self.model = None
39
+
40
+ def _load_index(self):
41
+ """Load or create FAISS index."""
42
+ try:
43
+ if os.path.exists(self.settings.embeddings_cache_path):
44
+ self.index = faiss.read_index(self.settings.embeddings_cache_path)
45
+ # Load metadata
46
+ metadata_path = self.settings.embeddings_cache_path.replace(
47
+ ".faiss", "_metadata.json"
48
+ )
49
+ if os.path.exists(metadata_path):
50
+ with open(metadata_path, "r", encoding="utf-8") as f:
51
+ self.job_metadata = json.load(f)
52
+ print(f"Loaded FAISS index with {self.index.ntotal} vectors")
53
+ else:
54
+ # Create new index
55
+ self.index = faiss.IndexFlatIP(self.settings.embedding_dimension)
56
+ print("Created new FAISS index")
57
+ except Exception as e:
58
+ print(f"Error loading FAISS index: {e}")
59
+ self.index = faiss.IndexFlatIP(self.settings.embedding_dimension)
60
+
61
+ def _save_index(self):
62
+ """Save FAISS index and metadata."""
63
+ try:
64
+ os.makedirs(
65
+ os.path.dirname(self.settings.embeddings_cache_path), exist_ok=True
66
+ )
67
+ faiss.write_index(self.index, self.settings.embeddings_cache_path)
68
+
69
+ # Save metadata
70
+ metadata_path = self.settings.embeddings_cache_path.replace(
71
+ ".faiss", "_metadata.json"
72
+ )
73
+ with open(metadata_path, "w", encoding="utf-8") as f:
74
+ json.dump(self.job_metadata, f, indent=2, default=str)
75
+
76
+ print(f"Saved FAISS index with {self.index.ntotal} vectors")
77
+ except Exception as e:
78
+ print(f"Error saving FAISS index: {e}")
79
+
80
+ def get_embedding(self, text: str) -> Optional[np.ndarray]:
81
+ """Get embedding for a single text."""
82
+ if not self.model:
83
+ return None
84
+
85
+ try:
86
+ embedding = self.model.encode([text])
87
+ # Normalize for cosine similarity (using Inner Product index)
88
+ embedding = embedding / np.linalg.norm(embedding, axis=1, keepdims=True)
89
+ return embedding[0]
90
+ except Exception as e:
91
+ print(f"Error generating embedding: {e}")
92
+ return None
93
+
94
+ def get_embeddings(self, texts: List[str]) -> Optional[np.ndarray]:
95
+ """Get embeddings for multiple texts."""
96
+ if not self.model:
97
+ return None
98
+
99
+ try:
100
+ embeddings = self.model.encode(texts, show_progress_bar=True)
101
+ # Normalize for cosine similarity
102
+ embeddings = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
103
+ return embeddings
104
+ except Exception as e:
105
+ print(f"Error generating embeddings: {e}")
106
+ return None
107
+
108
+ def add_job_embeddings(self, jobs: List[Dict[str, Any]]):
109
+ """Add job embeddings to the index."""
110
+ if not jobs:
111
+ return
112
+
113
+ # Prepare texts for embedding
114
+ texts = []
115
+ job_ids = []
116
+
117
+ for job in jobs:
118
+ # Combine job title, description, and requirements for embedding
119
+ job_text = f"{job.get('title', '')} {job.get('description', '')} {job.get('requirements', '')}"
120
+ texts.append(job_text)
121
+ job_ids.append(job.get("id", len(self.job_metadata)))
122
+
123
+ # Get embeddings
124
+ embeddings = self.get_embeddings(texts)
125
+ if embeddings is None:
126
+ return
127
+
128
+ # Add to index
129
+ self.index.add(embeddings.astype("float32"))
130
+
131
+ # Store metadata
132
+ for i, job in enumerate(jobs):
133
+ job_id = job_ids[i]
134
+ self.job_metadata[str(len(self.job_metadata))] = {
135
+ "job_id": job_id,
136
+ "title": job.get("title", ""),
137
+ "company": job.get("company", ""),
138
+ "location": job.get("location", ""),
139
+ "salary": job.get("salary", ""),
140
+ "url": job.get("url", ""),
141
+ "posted_date": job.get("posted_date", ""),
142
+ "job_type": job.get("job_type", ""),
143
+ "description": job.get("description", "")[:500], # Truncate for storage
144
+ }
145
+
146
+ self._save_index()
147
+ print(f"Added {len(jobs)} job embeddings to index")
148
+
149
+ def search_similar_jobs(
150
+ self, profile_text: str, k: int = 20
151
+ ) -> List[Tuple[Dict[str, Any], float]]:
152
+ """
153
+ Search for jobs similar to user profile.
154
+
155
+ Args:
156
+ profile_text: Combined user profile text for matching
157
+ k: Number of top results to return
158
+
159
+ Returns:
160
+ List of tuples (job_metadata, similarity_score)
161
+ """
162
+ if not self.index or self.index.ntotal == 0:
163
+ return []
164
+
165
+ # Get profile embedding
166
+ profile_embedding = self.get_embedding(profile_text)
167
+ if profile_embedding is None:
168
+ return []
169
+
170
+ # Search index
171
+ try:
172
+ scores, indices = self.index.search(
173
+ profile_embedding[np.newaxis].astype("float32"),
174
+ min(k, self.index.ntotal),
175
+ )
176
+
177
+ results = []
178
+ for i, (score, idx) in enumerate(zip(scores[0], indices[0])):
179
+ if idx == -1: # Invalid index
180
+ continue
181
+
182
+ job_meta = self.job_metadata.get(str(idx))
183
+ if job_meta:
184
+ results.append((job_meta, float(score)))
185
+
186
+ return results
187
+
188
+ except Exception as e:
189
+ print(f"Error searching similar jobs: {e}")
190
+ return []
191
+
192
+ def clear_index(self):
193
+ """Clear the index and metadata."""
194
+ self.index = faiss.IndexFlatIP(self.settings.embedding_dimension)
195
+ self.job_metadata = {}
196
+ self._save_index()
197
+ print("Cleared job embeddings index")
198
+
199
+ def get_index_stats(self) -> Dict[str, Any]:
200
+ """Get statistics about the index."""
201
+ return {
202
+ "total_jobs": self.index.ntotal if self.index else 0,
203
+ "embedding_dimension": self.settings.embedding_dimension,
204
+ "model_name": self.settings.embedding_model,
205
+ "metadata_count": len(self.job_metadata),
206
+ }
src/services/job_search_service.py ADDED
@@ -0,0 +1,362 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Job search service for fetching and ranking job posts."""
2
+
3
+ import json
4
+ import os
5
+ from datetime import datetime, timedelta
6
+ from typing import Any, Dict, List, Optional
7
+ from urllib.parse import quote
8
+
9
+ import requests
10
+ from bs4 import BeautifulSoup
11
+
12
+ from ..config import get_settings
13
+ from .embedding_service import EmbeddingService
14
+ from .profile_service import ProfileService, UserProfile
15
+
16
+
17
+ class JobSearchService:
18
+ """Service for searching and ranking job posts."""
19
+
20
+ def __init__(self):
21
+ self.settings = get_settings()
22
+ self.embedding_service = EmbeddingService()
23
+ self.profile_service = ProfileService()
24
+ self.jobs_cache = {}
25
+ self._load_cache()
26
+
27
+ def _load_cache(self):
28
+ """Load jobs cache from file."""
29
+ try:
30
+ if os.path.exists(self.settings.jobs_cache_path):
31
+ with open(self.settings.jobs_cache_path, "r", encoding="utf-8") as f:
32
+ self.jobs_cache = json.load(f)
33
+ except Exception as e:
34
+ print(f"Error loading jobs cache: {e}")
35
+ self.jobs_cache = {}
36
+
37
+ def _save_cache(self):
38
+ """Save jobs cache to file."""
39
+ try:
40
+ os.makedirs(os.path.dirname(self.settings.jobs_cache_path), exist_ok=True)
41
+ with open(self.settings.jobs_cache_path, "w", encoding="utf-8") as f:
42
+ json.dump(self.jobs_cache, f, indent=2, default=str)
43
+ except Exception as e:
44
+ print(f"Error saving jobs cache: {e}")
45
+
46
+ def _fetch_indeed_jobs(
47
+ self, query: str, location: str, job_type: str
48
+ ) -> List[Dict[str, Any]]:
49
+ """Fetch jobs from Indeed (web scraping - for demo purposes)."""
50
+ jobs = []
51
+
52
+ try:
53
+ # Construct Indeed search URL
54
+ base_url = "https://www.indeed.com/jobs"
55
+ params = {
56
+ "q": query,
57
+ "l": location,
58
+ "jt": job_type
59
+ if job_type in ["fulltime", "parttime", "contract"]
60
+ else "",
61
+ "limit": min(self.settings.max_jobs_per_search, 50),
62
+ }
63
+
64
+ # Build URL
65
+ url = f"{base_url}?" + "&".join(
66
+ [f"{k}={quote(str(v))}" for k, v in params.items() if v]
67
+ )
68
+
69
+ # Headers to mimic browser request
70
+ headers = {
71
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
72
+ }
73
+
74
+ # Make request with timeout
75
+ response = requests.get(url, headers=headers, timeout=10)
76
+ response.raise_for_status()
77
+
78
+ # Parse HTML
79
+ soup = BeautifulSoup(response.content, "html.parser")
80
+
81
+ # Find job cards (Indeed's structure may change)
82
+ job_cards = soup.find_all("div", class_="job_seen_beacon")
83
+
84
+ for i, card in enumerate(job_cards[: self.settings.max_jobs_per_search]):
85
+ try:
86
+ # Extract job information
87
+ title_elem = card.find("h2", class_="jobTitle")
88
+ title = (
89
+ title_elem.get_text(strip=True)
90
+ if title_elem
91
+ else f"Position {i+1}"
92
+ )
93
+
94
+ company_elem = card.find("span", class_="companyName")
95
+ company = (
96
+ company_elem.get_text(strip=True)
97
+ if company_elem
98
+ else "Unknown Company"
99
+ )
100
+
101
+ location_elem = card.find("div", class_="companyLocation")
102
+ job_location = (
103
+ location_elem.get_text(strip=True)
104
+ if location_elem
105
+ else location
106
+ )
107
+
108
+ summary_elem = card.find("div", class_="job-snippet")
109
+ summary = summary_elem.get_text(strip=True) if summary_elem else ""
110
+
111
+ # Try to get job URL
112
+ link_elem = title_elem.find("a") if title_elem else None
113
+ job_url = (
114
+ f"https://www.indeed.com{link_elem['href']}"
115
+ if link_elem and link_elem.get("href")
116
+ else ""
117
+ )
118
+
119
+ job = {
120
+ "id": f"indeed_{i}_{hash(title + company)}",
121
+ "title": title,
122
+ "company": company,
123
+ "location": job_location,
124
+ "description": summary,
125
+ "requirements": "", # Would need individual job page scraping
126
+ "salary": "", # Would need more detailed scraping
127
+ "url": job_url,
128
+ "posted_date": datetime.now().isoformat(),
129
+ "job_type": job_type or "full-time",
130
+ "source": "indeed",
131
+ }
132
+
133
+ jobs.append(job)
134
+
135
+ except Exception as e:
136
+ print(f"Error parsing job card {i}: {e}")
137
+ continue
138
+
139
+ except Exception as e:
140
+ print(f"Error fetching Indeed jobs: {e}")
141
+
142
+ return jobs
143
+
144
+ def _fetch_sample_jobs(
145
+ self, query: str, location: str, job_type: str
146
+ ) -> List[Dict[str, Any]]:
147
+ """Generate sample jobs for demo purposes."""
148
+ sample_jobs = [
149
+ {
150
+ "id": "sample_1",
151
+ "title": f"Senior {query} Developer",
152
+ "company": "TechCorp Solutions",
153
+ "location": location or "Remote",
154
+ "description": f"We are looking for an experienced {query} developer to join our dynamic team. You will work on cutting-edge projects using modern technologies and best practices.",
155
+ "requirements": f"5+ years experience with {query}, strong problem-solving skills, team collaboration",
156
+ "salary": "$80,000 - $120,000",
157
+ "url": "https://example.com/job1",
158
+ "posted_date": datetime.now().isoformat(),
159
+ "job_type": job_type or "full-time",
160
+ "source": "sample",
161
+ },
162
+ {
163
+ "id": "sample_2",
164
+ "title": f"{query} Engineer",
165
+ "company": "InnovateLabs",
166
+ "location": location or "New York, NY",
167
+ "description": f"Join our team as a {query} engineer and help build the next generation of applications. Great opportunity for growth and learning.",
168
+ "requirements": f"3+ years {query} experience, bachelor's degree preferred, excellent communication skills",
169
+ "salary": "$70,000 - $95,000",
170
+ "url": "https://example.com/job2",
171
+ "posted_date": (datetime.now() - timedelta(days=1)).isoformat(),
172
+ "job_type": job_type or "full-time",
173
+ "source": "sample",
174
+ },
175
+ {
176
+ "id": "sample_3",
177
+ "title": f"Junior {query} Specialist",
178
+ "company": "StartupXYZ",
179
+ "location": location or "San Francisco, CA",
180
+ "description": f"Great entry-level opportunity for {query} enthusiasts. We offer mentorship, training, and a collaborative environment.",
181
+ "requirements": f"1-2 years {query} experience, willingness to learn, passion for technology",
182
+ "salary": "$55,000 - $75,000",
183
+ "url": "https://example.com/job3",
184
+ "posted_date": (datetime.now() - timedelta(days=2)).isoformat(),
185
+ "job_type": job_type or "full-time",
186
+ "source": "sample",
187
+ },
188
+ ]
189
+
190
+ return sample_jobs
191
+
192
+ def search_jobs(
193
+ self, user_id: str, query: str = "", location: str = "", job_type: str = ""
194
+ ) -> Dict[str, Any]:
195
+ """
196
+ Search for jobs and rank them by relevance to user profile.
197
+
198
+ Args:
199
+ user_id: User identifier for personalization
200
+ query: Job search query/keywords
201
+ location: Preferred job location
202
+ job_type: Type of job (full-time, contract, remote, etc.)
203
+
204
+ Returns:
205
+ Dict with ranked job listings and fit scores
206
+ """
207
+ try:
208
+ # Get user profile
209
+ profile = self.profile_service.get_profile(user_id)
210
+ if not profile:
211
+ return {
212
+ "success": False,
213
+ "message": "User profile not found. Please create a profile first.",
214
+ }
215
+
216
+ # Create cache key
217
+ cache_key = f"{query}_{location}_{job_type}"
218
+ current_time = datetime.now()
219
+
220
+ # Check cache (expire after 1 hour)
221
+ if cache_key in self.jobs_cache:
222
+ cached_data = self.jobs_cache[cache_key]
223
+ cache_time = datetime.fromisoformat(cached_data["timestamp"])
224
+ if current_time - cache_time < timedelta(hours=1):
225
+ fresh_jobs = cached_data["jobs"]
226
+ else:
227
+ fresh_jobs = self._fetch_fresh_jobs(query, location, job_type)
228
+ self.jobs_cache[cache_key] = {
229
+ "jobs": fresh_jobs,
230
+ "timestamp": current_time.isoformat(),
231
+ }
232
+ self._save_cache()
233
+ else:
234
+ fresh_jobs = self._fetch_fresh_jobs(query, location, job_type)
235
+ self.jobs_cache[cache_key] = {
236
+ "jobs": fresh_jobs,
237
+ "timestamp": current_time.isoformat(),
238
+ }
239
+ self._save_cache()
240
+
241
+ if not fresh_jobs:
242
+ return {
243
+ "success": True,
244
+ "jobs": [],
245
+ "total_found": 0,
246
+ "message": "No jobs found for the given criteria",
247
+ "search_params": {
248
+ "query": query,
249
+ "location": location,
250
+ "job_type": job_type,
251
+ },
252
+ }
253
+
254
+ # Add jobs to embedding index
255
+ self.embedding_service.add_job_embeddings(fresh_jobs)
256
+
257
+ # Create profile text for matching
258
+ profile_text = (
259
+ f"{' '.join(profile.skills)} {profile.career_goals} {profile.resume}"
260
+ )
261
+
262
+ # Search for similar jobs
263
+ similar_jobs = self.embedding_service.search_similar_jobs(
264
+ profile_text, k=min(len(fresh_jobs), self.settings.max_jobs_per_search)
265
+ )
266
+
267
+ # Format results with fit scores
268
+ ranked_jobs = []
269
+ for job_meta, similarity_score in similar_jobs:
270
+ # Find the full job data
271
+ full_job = next(
272
+ (job for job in fresh_jobs if job["id"] == job_meta["job_id"]), None
273
+ )
274
+ if full_job:
275
+ # Calculate fit percentage (similarity score is cosine similarity)
276
+ fit_percentage = max(0, min(100, int(similarity_score * 100)))
277
+
278
+ ranked_job = {
279
+ **full_job,
280
+ "fit_score": fit_percentage,
281
+ "match_reasons": self._get_match_reasons(
282
+ profile, full_job, similarity_score
283
+ ),
284
+ }
285
+ ranked_jobs.append(ranked_job)
286
+
287
+ # Sort by fit score
288
+ ranked_jobs.sort(key=lambda x: x["fit_score"], reverse=True)
289
+
290
+ return {
291
+ "success": True,
292
+ "jobs": ranked_jobs,
293
+ "total_found": len(ranked_jobs),
294
+ "search_params": {
295
+ "query": query,
296
+ "location": location,
297
+ "job_type": job_type,
298
+ },
299
+ "user_profile": {
300
+ "skills_count": len(profile.skills),
301
+ "location": profile.location,
302
+ },
303
+ }
304
+
305
+ except Exception as e:
306
+ return {"success": False, "message": f"Error searching jobs: {str(e)}"}
307
+
308
+ def _fetch_fresh_jobs(
309
+ self, query: str, location: str, job_type: str
310
+ ) -> List[Dict[str, Any]]:
311
+ """Fetch fresh jobs from multiple sources."""
312
+ all_jobs = []
313
+
314
+ # Try Indeed first (commented out for demo - requires careful rate limiting)
315
+ # indeed_jobs = self._fetch_indeed_jobs(query, location, job_type)
316
+ # all_jobs.extend(indeed_jobs)
317
+
318
+ # Use sample jobs for demo
319
+ sample_jobs = self._fetch_sample_jobs(query, location, job_type)
320
+ all_jobs.extend(sample_jobs)
321
+
322
+ # TODO: Add more job sources
323
+ # - LinkedIn API (requires partnership)
324
+ # - AngelList API
325
+ # - GitHub Jobs API
326
+ # - Company career pages scraping
327
+
328
+ return all_jobs
329
+
330
+ def _get_match_reasons(
331
+ self, profile: UserProfile, job: Dict[str, Any], similarity_score: float
332
+ ) -> List[str]:
333
+ """Generate reasons why this job matches the user profile."""
334
+ reasons = []
335
+
336
+ # Check skill matches
337
+ job_text = f"{job['title']} {job['description']} {job['requirements']}".lower()
338
+ matching_skills = [
339
+ skill for skill in profile.skills if skill.lower() in job_text
340
+ ]
341
+
342
+ if matching_skills:
343
+ reasons.append(f"Skills match: {', '.join(matching_skills[:3])}")
344
+
345
+ # Check location preference
346
+ if profile.location and profile.location.lower() in job["location"].lower():
347
+ reasons.append("Location preference match")
348
+
349
+ # Check job type preference
350
+ if similarity_score > 0.8:
351
+ reasons.append("High relevance to your background")
352
+ elif similarity_score > 0.6:
353
+ reasons.append("Good match for your experience")
354
+
355
+ # Check career goals alignment
356
+ if any(
357
+ goal_word in job_text
358
+ for goal_word in profile.career_goals.lower().split()[:5]
359
+ ):
360
+ reasons.append("Aligns with career goals")
361
+
362
+ return reasons[:3] # Limit to top 3 reasons
src/services/llm_service.py ADDED
@@ -0,0 +1,256 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """LLM service for generating cover letters and Q&A responses."""
2
+
3
+ from typing import Dict, Any, Optional
4
+ import openai
5
+ from anthropic import Anthropic
6
+
7
+ from ..config import get_settings
8
+ from .profile_service import UserProfile
9
+
10
+
11
class LLMService:
    """Service for LLM-based text generation.

    Wraps OpenAI and Anthropic chat APIs behind one interface. The provider
    is chosen from settings.llm_provider with a fallback to whichever client
    has an API key configured; all generation failures are reported via
    return values (None / success=False dicts), never raised.
    """

    def __init__(self):
        self.settings = get_settings()
        self.openai_client = None
        self.anthropic_client = None
        self._initialize_clients()

    def _initialize_clients(self):
        """Initialize LLM clients based on available API keys."""
        if self.settings.openai_api_key:
            # NOTE(review): module-level `openai.api_key` + ChatCompletion is
            # the legacy (<1.0) OpenAI SDK interface — confirm the pinned
            # openai version supports it.
            openai.api_key = self.settings.openai_api_key
            self.openai_client = openai

        if self.settings.anthropic_api_key:
            self.anthropic_client = Anthropic(api_key=self.settings.anthropic_api_key)

    def _get_client(self):
        """Return (client, provider_name) for the configured provider.

        Prefers settings.llm_provider, falls back to any available client,
        and returns (None, None) when no API key was configured.
        """
        if self.settings.llm_provider == "openai" and self.openai_client:
            return self.openai_client, "openai"
        elif self.settings.llm_provider == "anthropic" and self.anthropic_client:
            return self.anthropic_client, "anthropic"
        elif self.openai_client:
            return self.openai_client, "openai"
        elif self.anthropic_client:
            return self.anthropic_client, "anthropic"
        else:
            return None, None

    def _call_openai(
        self, messages: list, max_tokens: Optional[int] = None
    ) -> Optional[str]:
        """Call the OpenAI chat API; returns the text or None on error."""
        try:
            response = self.openai_client.ChatCompletion.create(
                model=self.settings.llm_model,
                messages=messages,
                max_tokens=max_tokens or self.settings.max_tokens,
                temperature=self.settings.temperature,
            )
            return response.choices[0].message.content.strip()
        except Exception as e:
            print(f"OpenAI API error: {e}")
            return None

    def _call_anthropic(
        self, messages: list, max_tokens: Optional[int] = None
    ) -> Optional[str]:
        """Call the Anthropic messages API; returns the text or None on error.

        Converts OpenAI-style messages: the system message becomes the
        `system` parameter, and all remaining messages are concatenated into
        a single user turn.
        """
        try:
            # Convert messages format for Anthropic
            system_message = ""
            user_messages = []

            for msg in messages:
                if msg["role"] == "system":
                    system_message = msg["content"]
                else:
                    user_messages.append(msg)

            # Combine user messages
            combined_content = "\n\n".join([msg["content"] for msg in user_messages])

            response = self.anthropic_client.messages.create(
                model="claude-3-haiku-20240307",  # Using a lighter model for cost efficiency
                system=system_message,
                messages=[{"role": "user", "content": combined_content}],
                max_tokens=max_tokens or self.settings.max_tokens,
                temperature=self.settings.temperature,
            )
            return response.content[0].text.strip()
        except Exception as e:
            print(f"Anthropic API error: {e}")
            return None

    def generate_text(
        self, messages: list, max_tokens: Optional[int] = None
    ) -> Optional[str]:
        """Generate text using the configured LLM; None if no client/failure."""
        client, provider = self._get_client()
        if not client:
            return None

        if provider == "openai":
            return self._call_openai(messages, max_tokens)
        elif provider == "anthropic":
            return self._call_anthropic(messages, max_tokens)

        return None

    def generate_cover_letter(
        self, profile: UserProfile, job_description: str, tone: str = "professional"
    ) -> Dict[str, Any]:
        """
        Generate a personalized cover letter.

        Args:
            profile: User profile containing skills, experience, etc.
            job_description: The job posting description
            tone: Desired tone (professional, casual, enthusiastic, formal)

        Returns:
            Dict with generated cover letter and metadata, or success=False
            and a message on failure.
        """
        try:
            # Prepare context
            skills_text = ", ".join(profile.skills[:10])  # Limit skills for brevity

            # Create prompt messages
            system_prompt = f"""You are an expert cover letter writer. Generate a compelling, personalized cover letter that:
1. Is concise (under 300 words)
2. Matches the {tone} tone
3. Highlights relevant skills and experience
4. Shows enthusiasm for the specific role
5. Includes a strong opening and closing
6. Avoids generic templates

Format as a professional cover letter without date/address headers."""

            # BUG FIX: the truncation notes used to be written as "#" comments
            # *inside* this f-string, leaking literal comment text into the
            # prompt. The job description is capped at 2000 chars and the
            # resume at 1000 chars to stay within model token limits.
            user_prompt = f"""Generate a cover letter for this job:

JOB DESCRIPTION:
{job_description[:2000]}

CANDIDATE PROFILE:
- Skills: {skills_text}
- Experience: {profile.experience_level or 'Not specified'}
- Career Goals: {profile.career_goals}
- Location: {profile.location or 'Not specified'}
- Education: {profile.education or 'Not specified'}

RESUME SUMMARY:
{profile.resume[:1000]}

Tone: {tone}

Write a cover letter that specifically connects the candidate's background to this job opportunity."""

            messages = [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ]

            # Generate cover letter
            cover_letter = self.generate_text(messages, max_tokens=400)

            if cover_letter:
                return {
                    "success": True,
                    "cover_letter": cover_letter,
                    "tone_used": tone,
                    "word_count": len(cover_letter.split()),
                    "character_count": len(cover_letter),
                }
            else:
                return {"success": False, "message": "Failed to generate cover letter"}

        except Exception as e:
            return {
                "success": False,
                "message": f"Error generating cover letter: {str(e)}",
            }

    def generate_qa_response(
        self, profile: UserProfile, question: str, context: str = ""
    ) -> Dict[str, Any]:
        """
        Generate a response to an interview question or client inquiry.

        Args:
            profile: User profile for personalization
            question: The question to answer
            context: Additional context about the conversation

        Returns:
            Dict with generated response and a heuristic confidence score.
        """
        try:
            # Create prompt
            system_prompt = """You are a professional career advisor helping someone answer interview questions and client inquiries.

Generate responses that are:
1. Concise (under 150 words)
2. Professional and confident
3. Specific to the person's background
4. Include relevant examples when possible
5. End with enthusiasm or next steps

Avoid generic answers and be authentic."""

            user_prompt = f"""Answer this question professionally:

QUESTION: {question}

CONTEXT: {context}

CANDIDATE BACKGROUND:
- Skills: {", ".join(profile.skills[:8])}
- Experience Level: {profile.experience_level or 'Not specified'}
- Career Goals: {profile.career_goals}
- Key Background: {profile.resume[:800]}

Provide a specific, personalized answer that showcases relevant experience and skills."""

            messages = [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ]

            # Generate response
            response = self.generate_text(messages, max_tokens=200)

            if response:
                # Heuristic confidence score: base 0.6, boosted when the
                # answer cites experience keywords, mentions the candidate's
                # own skills, or hits the target 50-150 word length.
                word_count = len(response.split())
                has_examples = any(
                    keyword in response.lower()
                    for keyword in [
                        "experience",
                        "project",
                        "worked",
                        "developed",
                        "managed",
                    ]
                )
                has_specific_skills = any(
                    skill.lower() in response.lower() for skill in profile.skills[:5]
                )

                confidence_score = 0.6  # Base score
                if has_examples:
                    confidence_score += 0.2
                if has_specific_skills:
                    confidence_score += 0.15
                if 50 <= word_count <= 150:
                    confidence_score += 0.05

                confidence_score = min(confidence_score, 1.0)

                return {
                    "success": True,
                    "response": response,
                    "confidence_score": round(confidence_score, 2),
                    "word_count": word_count,
                }
            else:
                return {"success": False, "message": "Failed to generate response"}

        except Exception as e:
            return {"success": False, "message": f"Error generating response: {str(e)}"}
src/services/profile_service.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Profile service for managing user profiles and data."""
2
+
3
+ import json
4
+ import os
5
+ from datetime import datetime
6
+ from typing import Any, Dict, Optional
7
+
8
+ from pydantic import BaseModel, Field
9
+
10
+ from ..config import get_settings
11
+
12
+
13
class UserProfile(BaseModel):
    """User profile data model.

    Persisted as JSON by ProfileService; skills, career_goals and resume are
    also concatenated into the text used for embedding-based job matching.
    """

    user_id: str  # unique identifier supplied by the caller
    resume: str  # free-text resume / background summary
    skills: list[str]  # skill keywords, matched against job text
    salary_wish: str  # desired salary (free text)
    career_goals: str  # free-text goals, fed into matching and prompts
    experience_level: Optional[str] = None  # free-text experience level
    location: Optional[str] = None  # preferred location
    education: Optional[str] = None  # free-text education summary
    certifications: Optional[list[str]] = None  # optional certification names
    created_at: datetime = Field(default_factory=datetime.now)  # first insert time
    updated_at: datetime = Field(default_factory=datetime.now)  # last update time
27
+
28
+
29
class ProfileService:
    """Service for managing user profiles.

    Profiles are persisted as one JSON document ({user_id: profile_dict})
    at settings.profiles_db_path. The whole file is re-read/re-written on
    each operation, which is simple but not safe for concurrent writers.
    """

    def __init__(self):
        self.settings = get_settings()
        self.profiles_path = self.settings.profiles_db_path
        self._ensure_data_dir()

    def _ensure_data_dir(self):
        """Ensure the directory holding the profiles file exists."""
        os.makedirs(os.path.dirname(self.profiles_path), exist_ok=True)

    def _load_profiles(self) -> Dict[str, Dict[str, Any]]:
        """Load all profiles from file; empty dict on missing/corrupt file."""
        if not os.path.exists(self.profiles_path):
            return {}

        try:
            with open(self.profiles_path, "r", encoding="utf-8") as f:
                return json.load(f)
        except (json.JSONDecodeError, FileNotFoundError):
            return {}

    def _save_profiles(self, profiles: Dict[str, Dict[str, Any]]):
        """Save profiles to file (default=str serializes datetime fields)."""
        with open(self.profiles_path, "w", encoding="utf-8") as f:
            json.dump(profiles, f, indent=2, default=str)

    def upsert_profile(self, user_id: str, profile_data: str) -> Dict[str, Any]:
        """
        Store or update user profile.

        Args:
            user_id: Unique user identifier
            profile_data: JSON string containing profile information

        Returns:
            Dict with operation result (success flag, message, summary).
        """
        try:
            # Parse profile data
            profile_dict = json.loads(profile_data)

            # Validate required fields
            required_fields = ["resume", "skills", "salary_wish", "career_goals"]
            missing_fields = [
                field for field in required_fields if field not in profile_dict
            ]

            if missing_fields:
                return {
                    "success": False,
                    "message": f"Missing required fields: {', '.join(missing_fields)}",
                }

            # Create profile model (pydantic validates field types)
            profile_dict["user_id"] = user_id
            profile = UserProfile(**profile_dict)

            # Load existing profiles
            profiles = self._load_profiles()

            # On update: refresh updated_at but keep the original creation
            # timestamp. BUG FIX: previously created_at was silently reset
            # to "now" on every update because the model is rebuilt above.
            if user_id in profiles:
                profile.updated_at = datetime.now()
                existing_created = profiles[user_id].get("created_at")
                if existing_created:
                    try:
                        # Stored via json.dump(default=str), so this round-trips.
                        profile.created_at = datetime.fromisoformat(existing_created)
                    except (TypeError, ValueError):
                        # Unparseable legacy value: keep the fresh timestamp.
                        pass

            # Store profile
            profiles[user_id] = profile.dict()
            self._save_profiles(profiles)

            return {
                "success": True,
                "message": "Profile updated successfully",
                "user_id": user_id,
                "profile": {
                    "skills_count": len(profile.skills),
                    "updated_at": profile.updated_at.isoformat(),
                },
            }

        except json.JSONDecodeError:
            return {"success": False, "message": "Invalid JSON format in profile data"}
        except Exception as e:
            return {"success": False, "message": f"Error updating profile: {str(e)}"}

    def get_profile(self, user_id: str) -> Optional[UserProfile]:
        """Get user profile by ID, or None if it does not exist."""
        profiles = self._load_profiles()
        profile_data = profiles.get(user_id)

        if profile_data:
            return UserProfile(**profile_data)
        return None

    def delete_profile(self, user_id: str) -> bool:
        """Delete user profile; True if it existed, False otherwise."""
        profiles = self._load_profiles()
        if user_id in profiles:
            del profiles[user_id]
            self._save_profiles(profiles)
            return True
        return False

    def list_profiles(self) -> list[str]:
        """List all stored user IDs."""
        profiles = self._load_profiles()
        return list(profiles.keys())
src/tools/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ """Tools module for Job Search MCP Server."""
2
+
3
+ from .profile_tool import ProfileTool
4
+ from .job_search_tool import JobSearchTool
5
+ from .cover_letter_tool import CoverLetterTool
6
+ from .qa_tool import QATool
7
+
8
+ __all__ = ["ProfileTool", "JobSearchTool", "CoverLetterTool", "QATool"]
src/tools/cover_letter_tool.py ADDED
@@ -0,0 +1,249 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Cover letter tool for generating personalized cover letters - letter.generate endpoint."""
2
+
3
+ from typing import Dict, Any
4
+
5
+ from ..services import LLMService, ProfileService
6
+
7
+
8
+ class CoverLetterTool:
9
+ """Tool for generating personalized cover letters."""
10
+
11
+ def __init__(self):
12
+ self.llm_service = LLMService()
13
+ self.profile_service = ProfileService()
14
+
15
+ def generate(
16
+ self, user_id: str, job_description: str, tone: str = "professional"
17
+ ) -> Dict[str, Any]:
18
+ """
19
+ Generate a personalized cover letter using LLM.
20
+
21
+ This is the main letter.generate endpoint that calls an LLM to create
22
+ a short, personalized cover letter in any tone.
23
+
24
+ Args:
25
+ user_id: User identifier to access profile for personalization
26
+ job_description: The job posting description to tailor the letter to
27
+ tone: Tone of the cover letter
28
+ Options: "professional", "casual", "enthusiastic", "formal"
29
+ Default: "professional"
30
+
31
+ Returns:
32
+ Dict with generated cover letter and metadata:
33
+ {
34
+ "success": True,
35
+ "cover_letter": "Dear Hiring Manager,\n\nI am writing to express...",
36
+ "tone_used": "professional",
37
+ "word_count": 245,
38
+ "character_count": 1456,
39
+ "estimated_reading_time": "1 minute"
40
+ }
41
+ """
42
+ try:
43
+ # Get user profile
44
+ profile = self.profile_service.get_profile(user_id)
45
+ if not profile:
46
+ return {
47
+ "success": False,
48
+ "message": "User profile not found. Please create a profile first.",
49
+ }
50
+
51
+ # Validate tone
52
+ valid_tones = ["professional", "casual", "enthusiastic", "formal"]
53
+ if tone not in valid_tones:
54
+ return {
55
+ "success": False,
56
+ "message": f"Invalid tone. Must be one of: {', '.join(valid_tones)}",
57
+ }
58
+
59
+ # Validate job description
60
+ if not job_description or len(job_description.strip()) < 50:
61
+ return {
62
+ "success": False,
63
+ "message": "Job description must be at least 50 characters long",
64
+ }
65
+
66
+ # Generate cover letter
67
+ result = self.llm_service.generate_cover_letter(
68
+ profile, job_description, tone
69
+ )
70
+
71
+ # Add additional metadata if successful
72
+ if result.get("success") and result.get("cover_letter"):
73
+ word_count = result.get("word_count", 0)
74
+ result["estimated_reading_time"] = self._estimate_reading_time(
75
+ word_count
76
+ )
77
+ result["tips"] = self._get_cover_letter_tips(tone)
78
+
79
+ return result
80
+
81
+ except Exception as e:
82
+ return {
83
+ "success": False,
84
+ "message": f"Error generating cover letter: {str(e)}",
85
+ }
86
+
87
+ def generate_multiple_tones(
88
+ self, user_id: str, job_description: str, tones: list[str] = None
89
+ ) -> Dict[str, Any]:
90
+ """
91
+ Generate cover letters in multiple tones for comparison.
92
+
93
+ Args:
94
+ user_id: User identifier
95
+ job_description: Job posting description
96
+ tones: List of tones to generate (default: ["professional", "enthusiastic"])
97
+
98
+ Returns:
99
+ Dict with multiple cover letters in different tones
100
+ """
101
+ if tones is None:
102
+ tones = ["professional", "enthusiastic"]
103
+
104
+ results = {}
105
+ errors = []
106
+
107
+ for tone in tones:
108
+ result = self.generate(user_id, job_description, tone)
109
+ if result.get("success"):
110
+ results[tone] = result
111
+ else:
112
+ errors.append(f"{tone}: {result.get('message', 'Unknown error')}")
113
+
114
+ if results:
115
+ return {
116
+ "success": True,
117
+ "cover_letters": results,
118
+ "tones_generated": list(results.keys()),
119
+ "errors": errors if errors else None,
120
+ }
121
+ else:
122
+ return {
123
+ "success": False,
124
+ "message": "Failed to generate any cover letters",
125
+ "errors": errors,
126
+ }
127
+
128
+ def customize_for_company(
129
+ self,
130
+ user_id: str,
131
+ job_description: str,
132
+ company_info: str,
133
+ tone: str = "professional",
134
+ ) -> Dict[str, Any]:
135
+ """
136
+ Generate a cover letter with additional company-specific customization.
137
+
138
+ Args:
139
+ user_id: User identifier
140
+ job_description: Job posting description
141
+ company_info: Additional information about the company
142
+ tone: Tone for the cover letter
143
+
144
+ Returns:
145
+ Dict with customized cover letter
146
+ """
147
+ try:
148
+ # Enhance job description with company info
149
+ enhanced_description = (
150
+ f"{job_description}\n\nCompany Information:\n{company_info}"
151
+ )
152
+
153
+ # Generate the cover letter
154
+ result = self.generate(user_id, enhanced_description, tone)
155
+
156
+ if result.get("success"):
157
+ result["customization"] = "Company-specific information included"
158
+
159
+ return result
160
+
161
+ except Exception as e:
162
+ return {
163
+ "success": False,
164
+ "message": f"Error generating customized cover letter: {str(e)}",
165
+ }
166
+
167
+ def _estimate_reading_time(self, word_count: int) -> str:
168
+ """Estimate reading time based on word count (average 200 words per minute)."""
169
+ if word_count == 0:
170
+ return "0 minutes"
171
+
172
+ minutes = max(1, round(word_count / 200))
173
+ if minutes == 1:
174
+ return "1 minute"
175
+ else:
176
+ return f"{minutes} minutes"
177
+
178
+ def _get_cover_letter_tips(self, tone: str) -> list[str]:
179
+ """Get tone-specific tips for cover letters."""
180
+ tips_by_tone = {
181
+ "professional": [
182
+ "Keep it concise and focused on achievements",
183
+ "Use formal language and proper business format",
184
+ "Highlight quantifiable results when possible",
185
+ ],
186
+ "casual": [
187
+ "Show personality while remaining respectful",
188
+ "Use conversational language but avoid slang",
189
+ "Focus on cultural fit and team collaboration",
190
+ ],
191
+ "enthusiastic": [
192
+ "Express genuine excitement for the role",
193
+ "Use energetic language and positive adjectives",
194
+ "Show passion for the company's mission",
195
+ ],
196
+ "formal": [
197
+ "Follow strict business letter format",
198
+ "Use traditional and respectful language",
199
+ "Emphasize credentials and qualifications",
200
+ ],
201
+ }
202
+
203
+ return tips_by_tone.get(
204
+ tone, ["Tailor the letter to the specific job and company"]
205
+ )
206
+
207
+ def get_cover_letter_template(self, tone: str = "professional") -> Dict[str, Any]:
208
+ """
209
+ Get a basic cover letter template for the specified tone.
210
+
211
+ Args:
212
+ tone: Desired tone for the template
213
+
214
+ Returns:
215
+ Dict with template structure and guidelines
216
+ """
217
+ templates = {
218
+ "professional": {
219
+ "structure": [
220
+ "Header with your contact information",
221
+ "Date and employer contact information",
222
+ "Professional greeting",
223
+ "Opening paragraph: Position and brief introduction",
224
+ "Body paragraph 1: Relevant experience and skills",
225
+ "Body paragraph 2: Achievements and value proposition",
226
+ "Closing paragraph: Next steps and gratitude",
227
+ "Professional closing",
228
+ ],
229
+ "sample_opening": "I am writing to express my strong interest in the [Position Title] role at [Company Name].",
230
+ "sample_closing": "I would welcome the opportunity to discuss how my experience can contribute to your team's success.",
231
+ },
232
+ "enthusiastic": {
233
+ "structure": [
234
+ "Energetic opening that shows excitement",
235
+ "Passionate description of relevant experience",
236
+ "Connection to company mission and values",
237
+ "Enthusiastic closing with call to action",
238
+ ],
239
+ "sample_opening": "I am thrilled to apply for the [Position Title] position at [Company Name]!",
240
+ "sample_closing": "I can't wait to bring my passion and skills to your amazing team!",
241
+ },
242
+ }
243
+
244
+ return {
245
+ "success": True,
246
+ "tone": tone,
247
+ "template": templates.get(tone, templates["professional"]),
248
+ "tips": self._get_cover_letter_tips(tone),
249
+ }
src/tools/job_search_tool.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Job search tool for finding and ranking relevant jobs - jobs.search endpoint."""
2
+
3
+ from typing import Dict, Any
4
+
5
+ from ..services import JobSearchService
6
+
7
+
8
class JobSearchTool:
    """Tool for searching and ranking job opportunities (jobs.search endpoint)."""

    def __init__(self):
        self.job_search_service = JobSearchService()

    def search(
        self, user_id: str, query: str = "", location: str = "", job_type: str = ""
    ) -> Dict[str, Any]:
        """
        Search for jobs and rank them by relevance to the user's profile.

        Pulls fresh job posts, ranks them against the stored profile, and
        returns fit scores. Delegates entirely to the job search service.

        Args:
            user_id: User identifier for personalized ranking.
            query: Search keywords, e.g. "Python developer".
            location: Preferred location, e.g. "Remote", "New York".
            job_type: e.g. "full-time", "part-time", "contract", "freelance".

        Returns:
            Dict with ``success``, a ranked ``jobs`` list (each entry carrying
            id/title/company/location/description/requirements/salary/url/
            posted_date/job_type plus ``fit_score`` 0-100 and
            ``match_reasons``), ``total_found``, the echoed ``search_params``,
            and a ``user_profile`` summary.
        """
        return self.job_search_service.search_jobs(user_id, query, location, job_type)

    def get_job_details(self, job_id: str) -> Dict[str, Any]:
        """
        Get detailed information about a specific job.

        Args:
            job_id: Unique job identifier.

        Returns:
            Dict with job details; currently always a not-implemented error
            (would fetch from the job cache or re-scrape once implemented).
        """
        return {
            "success": False,
            "message": "Job details retrieval not implemented yet",
        }

    def get_search_suggestions(self, user_id: str) -> Dict[str, Any]:
        """
        Build personalized search suggestions from the user's profile.

        Args:
            user_id: User identifier.

        Returns:
            Dict with up to 10 suggested queries/locations and the user's top
            skills, or an error ``message`` if the profile is missing.
        """
        try:
            # Imported lazily to avoid a circular dependency at module load.
            from ..services import ProfileService

            profile = ProfileService().get_profile(user_id)
            if not profile:
                return {"success": False, "message": "User profile not found"}

            # Skill-based suggestions from the user's top five skills.
            suggestions = [
                {
                    "query": f"{skill} developer",
                    "reason": f"Based on your {skill} skills",
                }
                for skill in profile.skills[:5]
            ]

            # Career-goal-based suggestions (simple keyword heuristics).
            goals = profile.career_goals.lower()
            if "senior" in goals:
                suggestions.append(
                    {
                        "query": "senior developer",
                        "reason": "Based on your career goals",
                    }
                )
            if "remote" in goals:
                suggestions.append(
                    {
                        "location": "Remote",
                        "reason": "Based on your location preferences",
                    }
                )

            return {
                "success": True,
                "suggestions": suggestions[:10],
                "profile_skills": profile.skills[:10],
            }

        except Exception as e:
            return {"success": False, "message": f"Error getting suggestions: {str(e)}"}

    def clear_job_cache(self) -> Dict[str, Any]:
        """
        Clear the job search cache and embedding index to force fresh results.

        Returns:
            Dict with the operation result.
        """
        try:
            # Reset the in-memory cache, persist the empty state, then drop
            # the embedding index so stale vectors cannot be matched.
            self.job_search_service.jobs_cache = {}
            self.job_search_service._save_cache()
            self.job_search_service.embedding_service.clear_index()

            return {
                "success": True,
                "message": "Job cache and embeddings cleared successfully",
            }
        except Exception as e:
            return {"success": False, "message": f"Error clearing cache: {str(e)}"}
src/tools/profile_tool.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Profile tool for managing user profiles - profile.upsert endpoint."""
2
+
3
+ from typing import Dict, Any
4
+
5
+ from ..services import ProfileService
6
+
7
+
8
class ProfileTool:
    """Tool for managing user profiles (profile.upsert endpoint)."""

    def __init__(self):
        self.profile_service = ProfileService()

    def upsert(self, user_id: str, profile_data: str) -> Dict[str, Any]:
        """
        Store or update a user profile.

        Main profile.upsert endpoint: persists the user's résumé, skills,
        salary expectations, and career goals. Delegates to the profile
        service, which validates and stores the payload.

        Args:
            user_id: Unique identifier for the user.
            profile_data: JSON string with the profile. Required keys:
                ``resume`` (str), ``skills`` (list[str]), ``salary_wish``
                (str), ``career_goals`` (str). Optional keys include
                ``experience_level``, ``location``, ``education``, and
                ``certifications``.

        Returns:
            Dict with success status, message, and profile metadata.
        """
        return self.profile_service.upsert_profile(user_id, profile_data)

    def get(self, user_id: str) -> Dict[str, Any]:
        """
        Fetch a user profile by ID.

        Args:
            user_id: Unique user identifier.

        Returns:
            Dict with the profile data, or an error message if not found.
        """
        try:
            profile = self.profile_service.get_profile(user_id)
            if profile is None:
                return {"success": False, "message": "Profile not found"}
            return {"success": True, "profile": profile.dict()}
        except Exception as e:
            return {"success": False, "message": f"Error retrieving profile: {str(e)}"}

    def delete(self, user_id: str) -> Dict[str, Any]:
        """
        Delete a user profile.

        Args:
            user_id: Unique user identifier.

        Returns:
            Dict with the operation result.
        """
        try:
            if self.profile_service.delete_profile(user_id):
                return {"success": True, "message": "Profile deleted successfully"}
            return {"success": False, "message": "Profile not found"}
        except Exception as e:
            return {"success": False, "message": f"Error deleting profile: {str(e)}"}

    def list_all(self) -> Dict[str, Any]:
        """
        List the IDs of all stored profiles.

        Returns:
            Dict with the list of user IDs and their count.
        """
        try:
            user_ids = self.profile_service.list_profiles()
            return {"success": True, "user_ids": user_ids, "count": len(user_ids)}
        except Exception as e:
            return {"success": False, "message": f"Error listing profiles: {str(e)}"}
src/tools/qa_tool.py ADDED
@@ -0,0 +1,354 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Q&A tool for drafting responses to interview questions - qa.reply endpoint."""
2
+
3
+ from typing import Dict, Any, List
4
+
5
+ from ..services import LLMService, ProfileService
6
+
7
+
8
class QATool:
    """Tool for generating responses to interview questions and client inquiries."""

    def __init__(self):
        # Project-level services: LLM for drafting answers, profile store for
        # personalization data.
        self.llm_service = LLMService()
        self.profile_service = ProfileService()

    def reply(self, user_id: str, question: str, context: str = "") -> Dict[str, Any]:
        """
        Generate a response to an interview question or client inquiry.

        Main qa.reply endpoint: drafts a concise answer to client questions
        like "Why should we hire you?" to speed up Upwork, Fiverr, or
        LinkedIn chats.

        Args:
            user_id: User identifier; the stored profile personalizes the answer.
            question: The question to answer (at least 5 characters).
            context: Optional extra context about the conversation or role.

        Returns:
            Dict with ``success`` and, on success, the drafted ``response``
            plus ``follow_up_suggestions``, ``question_category``, and — for
            project-style questions — an ``estimated_delivery_time``.
        """
        try:
            # Answers are personalized from the stored profile, so one must exist.
            profile = self.profile_service.get_profile(user_id)
            if not profile:
                return {
                    "success": False,
                    "message": "User profile not found. Please create a profile first.",
                }

            if not question or len(question.strip()) < 5:
                return {
                    "success": False,
                    "message": "Question must be at least 5 characters long",
                }

            result = self.llm_service.generate_qa_response(profile, question, context)

            # Enrich successful generations with conversational extras.
            if result.get("success") and result.get("response"):
                result["follow_up_suggestions"] = self._get_follow_up_suggestions(
                    question, context
                )

                # Delivery estimates only make sense for project questions.
                if self._is_project_question(question):
                    result["estimated_delivery_time"] = self._estimate_delivery_time(
                        question, profile
                    )

                result["question_category"] = self._categorize_question(question)

            return result

        except Exception as e:
            return {"success": False, "message": f"Error generating response: {str(e)}"}

    def batch_reply(
        self, user_id: str, questions: List[Dict[str, str]]
    ) -> Dict[str, Any]:
        """
        Generate responses to multiple questions at once.

        Args:
            user_id: User identifier.
            questions: List of dicts with ``question`` and optional ``context``,
                e.g. ``[{"question": "Why should we hire you?", "context": "Senior role"}]``.

        Returns:
            Dict with per-question ``responses`` and ``errors``. ``success`` is
            True only if at least one response was generated (previously this
            reported success even when every question failed).
        """
        try:
            if not questions:
                return {
                    "success": False,
                    "message": "At least one question is required",
                }

            responses = []
            errors = []

            for i, q_data in enumerate(questions):
                question = q_data.get("question", "")
                context = q_data.get("context", "")

                result = self.reply(user_id, question, context)

                if result.get("success"):
                    responses.append({"index": i, "question": question, **result})
                else:
                    errors.append(
                        {
                            "index": i,
                            "question": question,
                            "error": result.get("message", "Unknown error"),
                        }
                    )

            # Success requires at least one generated response; an all-error
            # batch is a failure, reported with the collected errors.
            outcome: Dict[str, Any] = {
                "success": bool(responses),
                "responses": responses,
                "total_processed": len(responses),
                "errors": errors if errors else None,
            }
            if not responses:
                outcome["message"] = "Failed to generate any responses"
            return outcome

        except Exception as e:
            return {
                "success": False,
                "message": f"Error processing batch questions: {str(e)}",
            }

    def get_common_questions(self, job_type: str = "general") -> Dict[str, Any]:
        """
        Get a list of common interview questions with suggested approaches.

        Args:
            job_type: "general", "developer", or "freelance"; any other value
                falls back to the general list.

        Returns:
            Dict with the question list and job-type-specific tips.
        """
        common_questions = {
            "general": [
                "Tell me about yourself",
                "Why are you interested in this role?",
                "What are your greatest strengths?",
                "What is your biggest weakness?",
                "Where do you see yourself in 5 years?",
                "Why should we hire you?",
                "What motivates you?",
                "How do you handle stress and pressure?",
                "What's your ideal work environment?",
                "Do you have any questions for us?",
            ],
            "developer": [
                "Describe your development process",
                "How do you stay updated with new technologies?",
                "Tell me about a challenging bug you fixed",
                "How do you ensure code quality?",
                "What's your experience with version control?",
                "How do you approach debugging?",
                "What's your preferred development environment?",
                "How do you handle tight deadlines?",
                "Tell me about a project you're proud of",
                "How do you learn new programming languages?",
            ],
            "freelance": [
                "What's your hourly rate?",
                "How long will this project take?",
                "Can you show me examples of similar work?",
                "What's your communication style?",
                "How do you handle revisions?",
                "What's included in your price?",
                "Can you work in our timezone?",
                "How do you ensure project quality?",
                "What happens if you miss a deadline?",
                "Can you sign an NDA?",
            ],
        }

        questions = common_questions.get(job_type, common_questions["general"])

        return {
            "success": True,
            "job_type": job_type,
            "questions": questions,
            "tips": self._get_interview_tips(job_type),
        }

    def practice_session(
        self, user_id: str, job_type: str = "general", num_questions: int = 5
    ) -> Dict[str, Any]:
        """
        Generate a practice interview session with questions and sample answers.

        Args:
            user_id: User identifier.
            job_type: Job type used to pick targeted questions.
            num_questions: Number of questions to include (capped at the size
                of the question pool).

        Returns:
            Dict with practice questions and personalized sample answers.
        """
        try:
            questions_data = self.get_common_questions(job_type)
            all_questions = questions_data["questions"]

            # Random subset keeps repeated sessions varied.
            import random

            selected_questions = random.sample(
                all_questions, min(num_questions, len(all_questions))
            )

            practice_qa = []
            for question in selected_questions:
                response_result = self.reply(
                    user_id, question, f"Practice for {job_type} role"
                )

                # Questions the LLM failed on are silently dropped from the session.
                if response_result.get("success"):
                    practice_qa.append(
                        {
                            "question": question,
                            "sample_answer": response_result["response"],
                            "tips": response_result.get("follow_up_suggestions", []),
                        }
                    )

            return {
                "success": True,
                "job_type": job_type,
                "practice_questions": practice_qa,
                "total_questions": len(practice_qa),
                "general_tips": self._get_interview_tips(job_type),
            }

        except Exception as e:
            return {
                "success": False,
                "message": f"Error creating practice session: {str(e)}",
            }

    def _get_follow_up_suggestions(self, question: str, context: str) -> List[str]:
        """Return up to three follow-up lines keyed off keywords in the question."""
        suggestions: List[str] = []
        question_lower = question.lower()

        if "project" in question_lower or "work" in question_lower:
            suggestions.extend(
                [
                    "What's the expected timeline for this project?",
                    "What's the budget range?",
                    "Who will I be working with?",
                    "What are the main deliverables?",
                ]
            )

        if "experience" in question_lower or "skills" in question_lower:
            suggestions.extend(
                [
                    "Would you like to see examples of my work?",
                    "I can provide references from previous clients",
                    "What specific skills are most important for this role?",
                ]
            )

        if "rate" in question_lower or "price" in question_lower:
            suggestions.extend(
                [
                    "I'm flexible on pricing for long-term projects",
                    "What's your budget for this project?",
                    "I can provide a detailed project breakdown",
                ]
            )

        return suggestions[:3]  # Limit to top 3 suggestions

    def _is_project_question(self, question: str) -> bool:
        """Return True if the question looks like it concerns a concrete project."""
        project_keywords = [
            "project",
            "timeline",
            "deadline",
            "deliver",
            "complete",
            "build",
            "develop",
            "create",
        ]
        return any(keyword in question.lower() for keyword in project_keywords)

    def _estimate_delivery_time(self, question: str, profile) -> str:
        """
        Estimate project delivery time from complexity hints in the question.

        ``profile`` is currently unused but kept for interface stability
        (a future version could scale estimates by experience level).
        """
        question_lower = question.lower()
        if any(word in question_lower for word in ["simple", "basic", "small"]):
            return "1-2 days"
        if any(word in question_lower for word in ["complex", "large", "advanced"]):
            return "1-2 weeks"
        return "3-5 days"

    def _categorize_question(self, question: str) -> str:
        """Bucket the question into experience/project/pricing/motivation/general."""
        question_lower = question.lower()

        # Order matters: earlier categories win when keywords overlap.
        if any(
            word in question_lower
            for word in ["experience", "background", "skills", "qualification"]
        ):
            return "experience"
        if any(
            word in question_lower
            for word in ["project", "timeline", "deadline", "deliver"]
        ):
            return "project"
        if any(
            word in question_lower
            for word in ["rate", "price", "cost", "budget", "payment"]
        ):
            return "pricing"
        if any(word in question_lower for word in ["why", "motivation", "interest"]):
            return "motivation"
        return "general"

    def _get_interview_tips(self, job_type: str) -> List[str]:
        """Return job-type-specific interview tips, defaulting to general advice."""
        tips = {
            "general": [
                "Be specific with examples and achievements",
                "Prepare questions to ask the interviewer",
                "Research the company beforehand",
                "Practice your elevator pitch",
            ],
            "developer": [
                "Be ready to discuss your code and technical decisions",
                "Prepare for coding challenges or technical questions",
                "Show enthusiasm for learning new technologies",
                "Discuss your development methodology",
            ],
            "freelance": [
                "Have a portfolio ready to share",
                "Be clear about your process and timeline",
                "Discuss communication preferences upfront",
                "Be professional but personable",
            ],
        }

        return tips.get(job_type, tips["general"])
uv.lock ADDED
The diff for this file is too large to render. See raw diff