Create Job_application.py #8
by Geethuzzz - opened

Job_application.py ADDED (+166 -0)
@@ -0,0 +1,166 @@
# Import required libraries
import os
import streamlit as st
from crewai import Agent, Task, Crew, LLM
from crewai_tools import (
    SerperDevTool,
    FileReadTool,
    MDXSearchTool,
    ScrapeWebsiteTool
)
from embedchain import App
from embedchain.embedder import GeminiEmbedder

# Set up API keys (keep secrets out of the source; read them from the environment / Space secrets)
gemini_api_key = os.getenv("GEMINI_API_KEY", "")
serper_api_key = os.getenv("SERPER_API_KEY", "")

# Configure environment variables under the names litellm (for Gemini) and SerperDevTool expect
os.environ["GEMINI_API_KEY"] = gemini_api_key
os.environ["SERPER_API_KEY"] = serper_api_key

# Initialize Gemini embedding model
gemini_embedder = GeminiEmbedder(api_key=gemini_api_key)

# Initialize EmbedChain App with Gemini
embedchain_app = App(embedding_model=gemini_embedder)
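# The agents below don't set an `llm`, so crewai would fall back to its default (OpenAI-backed)
# model even though only Gemini/Serper keys are configured here. A minimal sketch, assuming the
# installed crewai version accepts litellm-style model strings, is to build a Gemini LLM and
# pass it to each Agent as `llm=gemini_llm`:
gemini_llm = LLM(model="gemini/gemini-1.5-flash", api_key=gemini_api_key)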
# Initialize Tools
search_tool = SerperDevTool()
scrape_tool = ScrapeWebsiteTool()
resume_file_path = 'resume.md'  # Use a relative or dynamic file path
read_resume = FileReadTool(file_path=resume_file_path)
semantic_search_resume = MDXSearchTool(mdx=resume_file_path, embedding_model=gemini_embedder, app=embedchain_app)
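# Untested alternative sketch: crewai-tools RAG tools are typically pointed at a non-OpenAI
# provider through a `config` dict rather than `embedding_model=` / `app=` keyword arguments,
# along the lines of:
#
#     MDXSearchTool(
#         mdx=resume_file_path,
#         config=dict(
#             llm=dict(provider="google", config=dict(model="gemini-1.5-flash")),
#             embedder=dict(provider="google", config=dict(model="models/embedding-001")),
#         ),
#     )
#
# Worth checking against the installed crewai-tools version before adopting.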
# Agent 1: Researcher
researcher = Agent(
    role="Tech Job Researcher",
    goal="Analyze job postings and extract required qualifications.",
    tools=[scrape_tool, search_tool],
    verbose=True,
    backstory=(
        "An expert in analyzing job postings, you identify essential skills "
        "and qualifications required for job applications."
    )
)

# Agent 2: Profiler
profiler = Agent(
    role="Personal Profiler for Engineers",
    goal="Create a detailed profile for job applicants.",
    tools=[read_resume, semantic_search_resume],
    verbose=True,
    backstory=(
        "Specializing in building comprehensive profiles, you extract and "
        "synthesize information to create impactful resumes."
    )
)

# Agent 3: Resume Strategist
resume_strategist = Agent(
    role="Resume Strategist for Engineers",
    goal="Refine resumes to align with job requirements.",
    tools=[read_resume, semantic_search_resume],
    verbose=True,
    backstory=(
        "Your expertise lies in crafting resumes that highlight key skills "
        "and experiences to match job requirements."
    )
)

# Agent 4: Interview Preparer
interview_preparer = Agent(
    role="Interview Preparer",
    goal="Generate potential interview questions and talking points.",
    tools=[read_resume, semantic_search_resume],
    verbose=True,
    backstory=(
        "You prepare candidates for interviews by formulating relevant questions "
        "and talking points based on the job and their profile."
    )
)
# Define Tasks
research_task = Task(
    description="Analyze the job posting at {job_posting_url} to extract key skills, qualifications, and requirements.",
    expected_output="A structured list of job requirements.",
    agent=researcher,
    async_execution=True
)

profile_task = Task(
    description="Create a detailed profile from the resume, the GitHub profile ({github_url}), and the personal write-up: {personal_writeup}",
    expected_output="A comprehensive profile document.",
    agent=profiler,
    async_execution=True
)

resume_strategy_task = Task(
    description="Tailor the resume based on job requirements and personal profile.",
    expected_output="An updated resume tailored to the job.",
    output_file="tailored_resume.md",
    context=[research_task, profile_task],
    agent=resume_strategist
)

interview_preparation_task = Task(
    description="Generate interview questions and talking points based on the tailored resume.",
    expected_output="A document with key interview questions and talking points.",
    output_file="interview_materials.md",
    context=[research_task, profile_task, resume_strategy_task],
    agent=interview_preparer
)
# Crew Setup
job_application_crew = Crew(
    agents=[researcher, profiler, resume_strategist, interview_preparer],
    tasks=[research_task, profile_task, resume_strategy_task, interview_preparation_task],
    verbose=True
)
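# Note: `kickoff(inputs=...)` further below substitutes the provided values into the
# {job_posting_url}, {github_url} and {personal_writeup} placeholders used in the task
# descriptions above.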
# Streamlit Application
st.title("AI-Powered Job Application Assistant")

# User Inputs
st.header("Provide Job Details")
job_posting_url = st.text_input("Job Posting URL", "https://jobs.lever.co/AIFund/6c82e23e-d954-4dd8-a734-c0c2c5ee00f1")
github_url = st.text_input("GitHub Profile URL", "https://github.com/joaomdmoura")
personal_writeup = st.text_area(
    "Personal Writeup",
    """Noah is an accomplished Software Engineering Leader with 18 years of experience,
    specializing in managing remote and in-office teams. He holds an MBA and has a strong
    background in AI and data science. Noah has successfully led major tech initiatives
    and startups, driving innovation and growth."""
)
# File Upload for Resume
st.header("Upload Resume")
uploaded_resume = st.file_uploader("Upload your resume (Markdown format)", type=["md"])
if uploaded_resume:
    # Save the upload to the same path the resume tools above were configured with,
    # so the agents read the uploaded file rather than a stale local copy.
    with open(resume_file_path, "wb") as f:
        f.write(uploaded_resume.getbuffer())
    st.success("Resume uploaded successfully!")
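# Note: semantic_search_resume indexed 'resume.md' when it was constructed above; if the freshly
# uploaded resume should also be searchable semantically, that tool may need to be re-created
# (and re-embedded) at this point.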
# Start Job Application Process
if st.button("Start Job Application Process"):
    job_application_inputs = {
        'job_posting_url': job_posting_url,
        'github_url': github_url,
        'personal_writeup': personal_writeup
    }

    # Run the Crew
    result = job_application_crew.kickoff(inputs=job_application_inputs)
    st.success("Job Application Process Completed!")

# Display Results
if os.path.exists("tailored_resume.md"):
    st.header("Generated Tailored Resume")
    with open("tailored_resume.md", "r") as f:
        st.markdown(f.read(), unsafe_allow_html=True)

if os.path.exists("interview_materials.md"):
    st.header("Generated Interview Materials")
    with open("interview_materials.md", "r") as f:
        st.markdown(f.read(), unsafe_allow_html=True)
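For local testing, the app can presumably be launched with "streamlit run Job_application.py" once streamlit, crewai, crewai-tools and embedchain are installed and the GEMINI_API_KEY and SERPER_API_KEY secrets are configured for the Space.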