File size: 8,537 Bytes
5e5e890 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 |
# Main Agent Coordinator
import time
from .scraper_agent import ScraperAgent
from .analyzer_agent import AnalyzerAgent
from .content_agent import ContentAgent
from memory.memory_manager import MemoryManager
class ProfileOrchestrator:
    """Main coordinator for all LinkedIn profile enhancement agents.

    Pipeline: scrape -> analyze -> generate suggestions -> persist session,
    then render a Markdown report for display.
    """

    def __init__(self):
        # One sub-agent per pipeline stage, plus a session cache.
        self.scraper = ScraperAgent()
        self.analyzer = AnalyzerAgent()
        self.content_generator = ContentAgent()
        self.memory = MemoryManager()

    def enhance_profile(self, linkedin_url, job_description="", force_refresh=True):
        """
        Main workflow for enhancing a LinkedIn profile.

        Args:
            linkedin_url (str): LinkedIn profile URL
            job_description (str): Optional job description for tailored suggestions
            force_refresh (bool): Force fresh scraping instead of using cache

        Returns:
            str: Markdown-formatted enhancement suggestions and analysis, or an
                error message string if any stage raised.
        """
        try:
            print(f"🎯 Starting profile enhancement for: {linkedin_url}")

            # Always clear cache for fresh data extraction
            if force_refresh:
                print("🗑️ Clearing all cached data...")
                self.memory.force_refresh_session(linkedin_url)
                # Clear any session data for this URL
                self.memory.clear_session_cache(linkedin_url)
                # Also clear any general cache
                self.memory.clear_session_cache()  # Clear all sessions

            # Step 1: Scrape LinkedIn profile data
            print("📡 Step 1: Scraping profile data...")
            print(f"🔗 Target URL: {linkedin_url}")
            profile_data = self.scraper.extract_profile_data(linkedin_url)

            # Verify we got data for the correct URL; warn but continue on mismatch.
            if profile_data.get('url') != linkedin_url:
                print("⚠️ URL mismatch detected!")
                print(f"   Expected: {linkedin_url}")
                print(f"   Got: {profile_data.get('url', 'Unknown')}")

            # Step 2: Analyze the profile
            print("🔍 Step 2: Analyzing profile...")
            analysis = self.analyzer.analyze_profile(profile_data, job_description)

            # Step 3: Generate enhancement suggestions
            print("💡 Step 3: Generating suggestions...")
            suggestions = self.content_generator.generate_suggestions(analysis, job_description)

            # Step 4: Store in memory for future reference
            session_data = {
                'profile_data': profile_data,
                'analysis': analysis,
                'suggestions': suggestions,
                'job_description': job_description,
                'timestamp': time.strftime('%Y-%m-%d %H:%M:%S'),
            }
            self.memory.store_session(linkedin_url, session_data)

            print("✅ Profile enhancement completed!")
            return self._format_output(analysis, suggestions)
        except Exception as e:
            # Top-level boundary: report the failure to the caller instead of crashing.
            return f"Error in orchestration: {str(e)}"

    def _format_output(self, analysis, suggestions):
        """Format the final output for display.

        Args:
            analysis (dict): Analyzer results (scores, strengths, weaknesses,
                keyword_analysis, recommendations).
            suggestions (dict): Category -> suggestion list/str; the special
                'ai_generated_content' category holds a dict of AI-written content.

        Returns:
            str: A Markdown report assembled from the inputs.
        """
        output = []

        # Profile Analysis Section
        output.append("## 📊 Profile Analysis")
        output.append("")
        output.append(f"**📈 Completeness Score:** {analysis.get('completeness_score', 0):.1f}%")
        output.append(f"**⭐ Overall Rating:** {analysis.get('overall_rating', 'Unknown')}")
        output.append(f"**🎯 Job Match Score:** {analysis.get('job_match_score', 0):.1f}%")
        output.append("")

        # Strengths
        strengths = analysis.get('strengths', [])
        if strengths:
            output.append("### 💪 Profile Strengths")
            for strength in strengths:
                output.append(f"✅ {strength}")
            output.append("")

        # Areas for Improvement
        weaknesses = analysis.get('weaknesses', [])
        if weaknesses:
            output.append("### 🔧 Areas for Improvement")
            for weakness in weaknesses:
                output.append(f"🔸 {weakness}")
            output.append("")

        # Keyword Analysis (caps lists to keep the report readable)
        keyword_analysis = analysis.get('keyword_analysis', {})
        if keyword_analysis:
            found_keywords = keyword_analysis.get('found_keywords', [])
            missing_keywords = keyword_analysis.get('missing_keywords', [])
            output.append("### 🔑 Keyword Analysis")
            output.append(f"**Keywords Found ({len(found_keywords)}):** {', '.join(found_keywords[:10])}")
            if missing_keywords:
                output.append(f"**Missing Keywords:** {', '.join(missing_keywords[:5])}")
            output.append("")

        # Enhancement Suggestions Section
        output.append("## 🎯 Enhancement Suggestions")
        output.append("")
        for category, items in suggestions.items():
            if category == 'ai_generated_content':
                # Special formatting for AI content
                output.append("### 🤖 AI-Generated Content Suggestions")
                ai_content = items if isinstance(items, dict) else {}

                if 'ai_headlines' in ai_content and ai_content['ai_headlines']:
                    output.append("")
                    output.append("#### ✨ Professional Headlines")
                    for i, headline in enumerate(ai_content['ai_headlines'], 1):
                        # Clean up the headline format: drop surrounding quotes,
                        # unescape quotes, and strip any "N." numbering prefix.
                        cleaned_headline = headline.strip('"').replace('\\"', '"')
                        if cleaned_headline.startswith(('1.', '2.', '3.', '4.', '5.')):
                            cleaned_headline = cleaned_headline[2:].strip()
                        output.append(f"{i}. {cleaned_headline}")
                    output.append("")

                if 'ai_about_section' in ai_content and ai_content['ai_about_section']:
                    output.append("#### 📝 Enhanced About Section")
                    output.append("```")
                    about_content = ai_content['ai_about_section']
                    # Clean up the about section: drop blank lines, trim whitespace.
                    about_lines = about_content.split('\n')
                    for line in about_lines:
                        if line.strip():
                            output.append(line.strip())
                    output.append("```")
                    output.append("")

                if 'ai_experience_descriptions' in ai_content and ai_content['ai_experience_descriptions']:
                    output.append("#### 💼 Experience Description Ideas")
                    for desc in ai_content['ai_experience_descriptions']:
                        output.append(f"• {desc}")
                    output.append("")
            else:
                # Standard formatting for other categories
                category_name = category.replace('_', ' ').title()
                output.append(f"### {category_name}")
                if isinstance(items, list):
                    for item in items:
                        output.append(f"• {item}")
                else:
                    output.append(f"• {items}")
                output.append("")

        # Next Steps Section
        output.append("## 🚀 Implementation Roadmap")
        output.append("")
        recommendations = analysis.get('recommendations', [])
        if recommendations:
            output.append("### 🎯 Priority Actions")
            for i, rec in enumerate(recommendations[:5], 1):
                output.append(f"{i}. {rec}")
            output.append("")

        output.append("### 📋 General Best Practices")
        output.append("🔸 Update your profile regularly with new achievements")
        output.append("🔸 Use professional keywords relevant to your industry")
        output.append("🔸 Engage with your network by sharing valuable content")
        output.append("🔸 Ask for recommendations from colleagues and clients")
        output.append("🔸 Monitor profile views and connection requests")
        output.append("")
        output.append("---")
        output.append("*Analysis powered by AI • Data scraped with respect to LinkedIn's ToS*")

        return "\n".join(output)
|