daniielyan committed
Commit e84eb34 · Parent: 33d7d69

🚀 Initial Boilerplate setup

Files changed (3)
  1. app.py +230 -0
  2. config.py +62 -0
  3. requirements.txt +14 -0
app.py ADDED
@@ -0,0 +1,230 @@
+ import gradio as gr
+ import json
+ from typing import Dict, List, Any
+
+
+ class JobSearchMCP:
+     """Job Search MCP Server - Smart job matching and application helper"""
+
+     def __init__(self):
+         self.user_profiles = {}  # In-memory storage for demo purposes
+
+     def profile_upsert(self, user_id: str, profile_data: str) -> Dict[str, Any]:
+         """
+         Stores/updates user résumé, skills, salary expectations, and career goals
+
+         Args:
+             user_id: Unique identifier for the user
+             profile_data: JSON string containing user profile information
+
+         Returns:
+             Dict with success status and message
+         """
+         try:
+             # TODO: Implement profile storage logic
+             # - Parse profile_data JSON
+             # - Validate required fields (resume, skills, salary_wish, career_goals)
+             # - Store in database or persistent storage
+             # - Return success/error response
+
+             return {
+                 "success": True,
+                 "message": "Profile updated successfully",
+                 "user_id": user_id,
+             }
+         except Exception as e:
+             return {"success": False, "message": f"Error updating profile: {str(e)}"}
+
+     def jobs_search(
+         self, user_id: str, query: str = "", location: str = "", job_type: str = ""
+     ) -> Dict[str, Any]:
+         """
+         Pulls fresh job posts, ranks them with GPU embeddings, and returns fit scores
+
+         Args:
+             user_id: User identifier to get personalized results
+             query: Job search query/keywords
+             location: Preferred job location
+             job_type: Type of job (full-time, contract, remote, etc.)
+
+         Returns:
+             Dict with ranked job listings and fit scores
+         """
+         try:
+             # TODO: Implement job search logic
+             # - Fetch jobs from various APIs (LinkedIn, Indeed, etc.)
+             # - Use GPU embeddings to calculate job-profile fit scores
+             # - Rank jobs by relevance and fit score
+             # - Return top matches with metadata
+
+             return {
+                 "success": True,
+                 "jobs": [],
+                 "total_found": 0,
+                 "search_params": {
+                     "query": query,
+                     "location": location,
+                     "job_type": job_type,
+                 },
+             }
+         except Exception as e:
+             return {"success": False, "message": f"Error searching jobs: {str(e)}"}
+
+     def letter_generate(
+         self, user_id: str, job_description: str, tone: str = "professional"
+     ) -> Dict[str, Any]:
+         """
+         Generates personalized cover letters using LLM
+
+         Args:
+             user_id: User identifier to access profile
+             job_description: The job posting description
+             tone: Tone of the cover letter (professional, casual, enthusiastic, etc.)
+
+         Returns:
+             Dict with generated cover letter
+         """
+         try:
+             # TODO: Implement cover letter generation
+             # - Retrieve user profile
+             # - Use LLM to generate personalized cover letter
+             # - Match user skills with job requirements
+             # - Apply specified tone
+             # - Return formatted cover letter
+
+             return {
+                 "success": True,
+                 "cover_letter": "",
+                 "tone_used": tone,
+                 "word_count": 0,
+             }
+         except Exception as e:
+             return {
+                 "success": False,
+                 "message": f"Error generating cover letter: {str(e)}",
+             }
+
+     def qa_reply(
+         self, user_id: str, question: str, context: str = ""
+     ) -> Dict[str, Any]:
+         """
+         Drafts concise answers to client questions
+
+         Args:
+             user_id: User identifier to access profile
+             question: The question from potential employer/client
+             context: Additional context about the conversation
+
+         Returns:
+             Dict with generated response
+         """
+         try:
+             # TODO: Implement Q&A response generation
+             # - Retrieve user profile and experience
+             # - Generate contextual response using LLM
+             # - Keep response concise and professional
+             # - Tailor to user's background and skills
+
+             return {"success": True, "response": "", "confidence_score": 0.0}
+         except Exception as e:
+             return {"success": False, "message": f"Error generating response: {str(e)}"}
+
+
+ # Initialize the MCP server
+ mcp_server = JobSearchMCP()
+
+ # Create Gradio interface for each endpoint
+ with gr.Blocks(title="Job Search MCP Server") as demo:
+     gr.Markdown("# 🔍 Job Search MCP Server")
+     gr.Markdown("Smart job matching and instant application helper")
+
+     with gr.Tab("Profile Management"):
+         gr.Markdown("### Store and update your professional profile")
+         with gr.Row():
+             profile_user_id = gr.Textbox(
+                 label="User ID", placeholder="Enter your unique user ID"
+             )
+             profile_data = gr.TextArea(
+                 label="Profile Data (JSON)",
+                 placeholder='{"resume": "...", "skills": [...], "salary_wish": "...", "career_goals": "..."}',
+                 lines=5,
+             )
+         profile_submit = gr.Button("Update Profile", variant="primary")
+         profile_output = gr.JSON(label="Response")
+
+         profile_submit.click(
+             fn=mcp_server.profile_upsert,
+             inputs=[profile_user_id, profile_data],
+             outputs=profile_output,
+         )
+
+     with gr.Tab("Job Search"):
+         gr.Markdown("### Find and rank relevant job opportunities")
+         with gr.Row():
+             search_user_id = gr.Textbox(label="User ID")
+             search_query = gr.Textbox(
+                 label="Search Query", placeholder="e.g., Python developer"
+             )
+         with gr.Row():
+             search_location = gr.Textbox(
+                 label="Location", placeholder="e.g., Remote, New York"
+             )
+             search_job_type = gr.Dropdown(
+                 label="Job Type",
+                 choices=["full-time", "part-time", "contract", "freelance", "remote"],
+                 value="full-time",
+             )
+         search_submit = gr.Button("Search Jobs", variant="primary")
+         search_output = gr.JSON(label="Job Results")
+
+         search_submit.click(
+             fn=mcp_server.jobs_search,
+             inputs=[search_user_id, search_query, search_location, search_job_type],
+             outputs=search_output,
+         )
+
+     with gr.Tab("Cover Letter Generator"):
+         gr.Markdown("### Generate personalized cover letters")
+         with gr.Row():
+             letter_user_id = gr.Textbox(label="User ID")
+             letter_tone = gr.Dropdown(
+                 label="Tone",
+                 choices=["professional", "casual", "enthusiastic", "formal"],
+                 value="professional",
+             )
+         letter_job_desc = gr.TextArea(
+             label="Job Description",
+             placeholder="Paste the job description here...",
+             lines=5,
+         )
+         letter_submit = gr.Button("Generate Cover Letter", variant="primary")
+         letter_output = gr.JSON(label="Generated Letter")
+
+         letter_submit.click(
+             fn=mcp_server.letter_generate,
+             inputs=[letter_user_id, letter_job_desc, letter_tone],
+             outputs=letter_output,
+         )
+
+     with gr.Tab("Q&A Assistant"):
+         gr.Markdown("### Get help with interview questions and client responses")
+         with gr.Row():
+             qa_user_id = gr.Textbox(label="User ID")
+             qa_context = gr.Textbox(
+                 label="Context (optional)", placeholder="Additional context..."
+             )
+         qa_question = gr.TextArea(
+             label="Question", placeholder="e.g., Why should we hire you?", lines=3
+         )
+         qa_submit = gr.Button("Generate Response", variant="primary")
+         qa_output = gr.JSON(label="Generated Response")
+
+         qa_submit.click(
+             fn=mcp_server.qa_reply,
+             inputs=[qa_user_id, qa_question, qa_context],
+             outputs=qa_output,
+         )
+
+ if __name__ == "__main__":
+     # Enable MCP server functionality (Gradio exposes this via mcp_server=True)
+     demo.launch(mcp_server=True)
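Note on the `jobs_search` stub: its ranking step is left as a TODO. Below is a minimal sketch of how that step could look with the `sentence-transformers` package already pinned in requirements.txt; the job list, its `"description"` field, and the `rank_jobs` helper are illustrative assumptions, not part of this commit, and fetching postings from the job APIs is not shown.

```python
# Sketch only - not part of the commit. Assumes postings were fetched elsewhere
# and that each job dict carries a "description" string (an assumption).
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # Config.EMBEDDING_MODEL default


def rank_jobs(profile_text: str, jobs: list[dict], min_score: float = 0.6) -> list[dict]:
    """Attach a cosine-similarity fit score to each posting and sort best-first."""
    profile_emb = model.encode(profile_text, convert_to_tensor=True)
    job_embs = model.encode([job["description"] for job in jobs], convert_to_tensor=True)
    scores = util.cos_sim(profile_emb, job_embs)[0]  # one score per posting
    ranked = [
        {**job, "fit_score": round(float(score), 3)}
        for job, score in zip(jobs, scores)
        if float(score) >= min_score  # Config.MIN_MATCH_SCORE default
    ]
    return sorted(ranked, key=lambda job: job["fit_score"], reverse=True)
```

The same embeddings run on GPU automatically when one is available, which matches the "GPU embeddings" wording in the docstring.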
config.py ADDED
@@ -0,0 +1,62 @@
+ import os
+ from dotenv import load_dotenv
+
+ # Load environment variables from .env file
+ load_dotenv()
+
+
+ class Config:
+     """Configuration settings for Job Search MCP Server"""
+
+     # API Keys
+     OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+     ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
+     HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
+
+     # Job Search APIs
+     LINKEDIN_API_KEY = os.getenv("LINKEDIN_API_KEY")
+     INDEED_API_KEY = os.getenv("INDEED_API_KEY")
+     GLASSDOOR_API_KEY = os.getenv("GLASSDOOR_API_KEY")
+
+     # Database Settings (for future implementation)
+     DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///jobsearch.db")
+
+     # Embedding Model Settings
+     EMBEDDING_MODEL = os.getenv(
+         "EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2"
+     )
+     EMBEDDING_CACHE_SIZE = int(os.getenv("EMBEDDING_CACHE_SIZE", "1000"))
+
+     # LLM Settings
+     DEFAULT_LLM_MODEL = os.getenv("DEFAULT_LLM_MODEL", "gpt-3.5-turbo")
+     MAX_TOKENS = int(os.getenv("MAX_TOKENS", "1000"))
+     TEMPERATURE = float(os.getenv("TEMPERATURE", "0.7"))
+
+     # Job Search Settings
+     MAX_JOBS_PER_SEARCH = int(os.getenv("MAX_JOBS_PER_SEARCH", "50"))
+     MIN_MATCH_SCORE = float(os.getenv("MIN_MATCH_SCORE", "0.6"))
+
+     # Cover Letter Settings
+     MAX_COVER_LETTER_WORDS = int(os.getenv("MAX_COVER_LETTER_WORDS", "300"))
+
+     # Server Settings
+     SERVER_HOST = os.getenv("SERVER_HOST", "127.0.0.1")
+     SERVER_PORT = int(os.getenv("SERVER_PORT", "7860"))
+     DEBUG = os.getenv("DEBUG", "False").lower() == "true"
+
+     @classmethod
+     def validate_config(cls):
+         """Validate that required configuration is present"""
+         required_keys = ["OPENAI_API_KEY", "ANTHROPIC_API_KEY"]
+
+         missing_keys = []
+         for key in required_keys:
+             if not getattr(cls, key):
+                 missing_keys.append(key)
+
+         if missing_keys:
+             raise ValueError(
+                 f"Missing required environment variables: {', '.join(missing_keys)}"
+             )
+
+         return True
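config.py only defines settings; nothing in this commit consumes them yet. A minimal usage sketch, assuming it runs next to config.py with a populated .env file (the wiring below is hypothetical, not part of the commit):

```python
# Sketch only - illustrates how app.py might consume Config; not part of the commit.
from sentence_transformers import SentenceTransformer

from config import Config

Config.validate_config()  # raises ValueError if OPENAI_API_KEY or ANTHROPIC_API_KEY is unset
embedder = SentenceTransformer(Config.EMBEDDING_MODEL)  # "sentence-transformers/all-MiniLM-L6-v2" by default
print(f"Configured for {Config.SERVER_HOST}:{Config.SERVER_PORT} (debug={Config.DEBUG})")
```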
requirements.txt ADDED
@@ -0,0 +1,14 @@
+ gradio[mcp]>=5.0.0
+ openai>=1.0.0
+ anthropic>=0.30.0
+ requests>=2.31.0
+ python-dotenv>=1.0.0
+ numpy>=1.24.0
+ sentence-transformers>=2.2.0
+ scikit-learn>=1.3.0
+ pandas>=2.0.0
+ beautifulsoup4>=4.12.0
+ lxml>=4.9.0
+ httpx>=0.24.0
+ pydantic>=2.0.0
+ python-multipart>=0.0.6
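requirements.txt already pins the `openai` 1.x client that the `letter_generate` TODO points at. A minimal sketch of that call, assuming a plain-text candidate profile is available and reusing the defaults from config.py; the prompt wording and the `draft_cover_letter` helper are illustrative assumptions, not part of the commit:

```python
# Sketch only - one hypothetical way to fill letter_generate's TODO.
from openai import OpenAI

from config import Config

client = OpenAI(api_key=Config.OPENAI_API_KEY)


def draft_cover_letter(profile_text: str, job_description: str, tone: str = "professional") -> str:
    """Ask the configured chat model for a short cover letter in the requested tone."""
    response = client.chat.completions.create(
        model=Config.DEFAULT_LLM_MODEL,  # "gpt-3.5-turbo" by default
        max_tokens=Config.MAX_TOKENS,
        temperature=Config.TEMPERATURE,
        messages=[
            {
                "role": "system",
                "content": f"You write concise, {tone} cover letters of at most "
                f"{Config.MAX_COVER_LETTER_WORDS} words.",
            },
            {
                "role": "user",
                "content": f"Candidate profile:\n{profile_text}\n\nJob description:\n{job_description}",
            },
        ],
    )
    return response.choices[0].message.content
```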