"""Orchestrator that coordinates the LinkedIn profile enhancement agents."""

import time

from .scraper_agent import ScraperAgent
from .analyzer_agent import AnalyzerAgent
from .content_agent import ContentAgent
from memory.memory_manager import MemoryManager


class ProfileOrchestrator:
    """Main coordinator for all LinkedIn profile enhancement agents."""

    def __init__(self):
        self.scraper = ScraperAgent()  # fetches raw profile data
        self.analyzer = AnalyzerAgent()  # scores and critiques the profile
        self.content_generator = ContentAgent()  # drafts suggested copy
        self.memory = MemoryManager()  # caches per-URL session results

    def enhance_profile(self, linkedin_url, job_description="", force_refresh=True):
        """
        Main workflow for enhancing a LinkedIn profile.

        Args:
            linkedin_url (str): LinkedIn profile URL
            job_description (str): Optional job description for tailored suggestions
            force_refresh (bool): Force fresh scraping instead of using cached data

        Returns:
            str: Enhancement suggestions and analysis
        """
        try:
            print(f"🎯 Starting profile enhancement for: {linkedin_url}")

            if force_refresh:
                print("🗑️ Clearing all cached data...")
                # Drop this URL's stored session and cache entry, then clear
                # the rest of the session cache so every step runs fresh.
                self.memory.force_refresh_session(linkedin_url)
                self.memory.clear_session_cache(linkedin_url)
                self.memory.clear_session_cache()

print("π‘ Step 1: Scraping profile data...") |
|
print(f"π Target URL: {linkedin_url}") |
|
profile_data = self.scraper.extract_profile_data(linkedin_url) |
|
|
|
|
|
if profile_data.get('url') != linkedin_url: |
|
print(f"β οΈ URL mismatch detected!") |
|
print(f" Expected: {linkedin_url}") |
|
print(f" Got: {profile_data.get('url', 'Unknown')}") |
|
|
|
|
|
print("π Step 2: Analyzing profile...") |
|
analysis = self.analyzer.analyze_profile(profile_data, job_description) |
|
|
|
|
|
print("π‘ Step 3: Generating suggestions...") |
|
suggestions = self.content_generator.generate_suggestions(analysis, job_description) |
|
|
|
|
|
session_data = { |
|
'profile_data': profile_data, |
|
'analysis': analysis, |
|
'suggestions': suggestions, |
|
'job_description': job_description, |
|
'timestamp': time.strftime('%Y-%m-%d %H:%M:%S') |
|
} |
|
self.memory.store_session(linkedin_url, session_data) |
|
|
|
print("β
Profile enhancement completed!") |
|
return self._format_output(analysis, suggestions) |
|
|
|
except Exception as e: |
|
return f"Error in orchestration: {str(e)}" |

    def _format_output(self, analysis, suggestions):
        """Format the final output as a Markdown report."""
        output = []

output.append("## π Profile Analysis") |
|
output.append("") |
|
output.append(f"**π Completeness Score:** {analysis.get('completeness_score', 0):.1f}%") |
|
output.append(f"**β Overall Rating:** {analysis.get('overall_rating', 'Unknown')}") |
|
output.append(f"**π― Job Match Score:** {analysis.get('job_match_score', 0):.1f}%") |
|
output.append("") |
|
|
|
|
|
        strengths = analysis.get('strengths', [])
        if strengths:
            output.append("### 🌟 Profile Strengths")
            for strength in strengths:
                output.append(f"✅ {strength}")
            output.append("")

        weaknesses = analysis.get('weaknesses', [])
        if weaknesses:
            output.append("### 🔧 Areas for Improvement")
            for weakness in weaknesses:
                output.append(f"🔸 {weakness}")
            output.append("")

        keyword_analysis = analysis.get('keyword_analysis', {})
        if keyword_analysis:
            found_keywords = keyword_analysis.get('found_keywords', [])
            missing_keywords = keyword_analysis.get('missing_keywords', [])

            output.append("### 🔍 Keyword Analysis")
            output.append(f"**Keywords Found ({len(found_keywords)}):** {', '.join(found_keywords[:10])}")
            if missing_keywords:
                output.append(f"**Missing Keywords:** {', '.join(missing_keywords[:5])}")
            output.append("")

output.append("## π― Enhancement Suggestions") |
|
output.append("") |
|
|
|
for category, items in suggestions.items(): |
|
if category == 'ai_generated_content': |
|
|
|
output.append("### π€ AI-Generated Content Suggestions") |
|
ai_content = items if isinstance(items, dict) else {} |
|
|
|
if 'ai_headlines' in ai_content and ai_content['ai_headlines']: |
|
output.append("") |
|
output.append("#### β¨ Professional Headlines") |
|
for i, headline in enumerate(ai_content['ai_headlines'], 1): |
|
|
|
cleaned_headline = headline.strip('"').replace('\\"', '"') |
|
if cleaned_headline.startswith(('1.', '2.', '3.', '4.', '5.')): |
|
cleaned_headline = cleaned_headline[2:].strip() |
|
output.append(f"{i}. {cleaned_headline}") |
|
output.append("") |
|
|
|
                if ai_content.get('ai_about_section'):
                    output.append("#### 📝 Enhanced About Section")
                    output.append("```")
                    about_content = ai_content['ai_about_section']
                    # Re-emit only non-empty lines to keep the fenced block tidy.
                    for line in about_content.split('\n'):
                        if line.strip():
                            output.append(line.strip())
                    output.append("```")
                    output.append("")

                if ai_content.get('ai_experience_descriptions'):
                    output.append("#### 💼 Experience Description Ideas")
                    for desc in ai_content['ai_experience_descriptions']:
                        output.append(f"• {desc}")
                    output.append("")
            else:
                # Generic categories: render the key as a heading and the
                # item(s) as bullets.
                category_name = category.replace('_', ' ').title()
                output.append(f"### {category_name}")
                if isinstance(items, list):
                    for item in items:
                        output.append(f"• {item}")
                else:
                    output.append(f"• {items}")
                output.append("")

output.append("## π Implementation Roadmap") |
|
output.append("") |
|
recommendations = analysis.get('recommendations', []) |
|
if recommendations: |
|
output.append("### π― Priority Actions") |
|
for i, rec in enumerate(recommendations[:5], 1): |
|
output.append(f"{i}. {rec}") |
|
output.append("") |
|
|
|
output.append("### π General Best Practices") |
|
output.append("πΈ Update your profile regularly with new achievements") |
|
output.append("πΈ Use professional keywords relevant to your industry") |
|
output.append("πΈ Engage with your network by sharing valuable content") |
|
output.append("πΈ Ask for recommendations from colleagues and clients") |
|
output.append("πΈ Monitor profile views and connection requests") |
|
output.append("") |
|
|
|
output.append("---") |
|
output.append("*Analysis powered by AI β’ Data scraped with respect to LinkedIn's ToS*") |
|
|
|
return "\n".join(output) |