import os

import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
from pypdf import PdfReader
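
# Configuration comes from environment variables (loaded from a local .env file when present).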
load_dotenv(override=True)

api_key = os.getenv("OPENROUTER_API_KEY")
cv_link = os.getenv("CV_LINK")
name = os.getenv("NAME")

if not api_key:
    raise ValueError("OPENROUTER_API_KEY not found in .env file. Please set it.")

# OpenRouter exposes an OpenAI-compatible endpoint, so the standard OpenAI client
# works once it is pointed at the OpenRouter base URL.
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=api_key,
)
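
# First-person background summary; it is injected into the system prompt as grounding context.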
summary = (
    "I am a Backend Software Engineer with 3 years of experience and a strong focus on building scalable, "
    "well-architected systems using NestJS, GraphQL, and PostgreSQL. I have proven experience in developing "
    "high-performance APIs, applying Domain-Driven Design (DDD), and leading AI-powered solutions such as "
    "RAG-based chatbots integrated with vector databases like Qdrant and Pinecone. I am skilled in clean "
    "architecture, microservices, and production-grade e-commerce systems, and I'm adept at bridging business "
    "needs with technical implementation through both code and technical writing. I have contributed to "
    "real-time systems using gRPC, Redis, and message queues (e.g., RabbitMQ). I'm passionate about automation, "
    "developer tools (e.g., n8n, Copilot, Cursor), and mentoring through technical content. As an ALX alumna, "
    "I have a hands-on mindset and a continuous learning attitude. I am also the author of a technical blog on "
    "software engineering and AI systems: shazaali.substack.com."
)
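
# Pull additional profile context from a LinkedIn export ("linkedin.pdf") placed next to the app.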
linkedin_text = "My LinkedIn Profile: https://www.linkedin.com/in/shazaali/\n\n"
try:
    reader = PdfReader("linkedin.pdf")
    for page in reader.pages:
        # Defensive: some pages may yield no extractable text.
        linkedin_text += (page.extract_text() or "") + "\n"
except FileNotFoundError:
    print("Warning: 'linkedin.pdf' not found. The bot will rely only on the summary.")
    linkedin_text = "LinkedIn profile data is not available."
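
# The system prompt pins the model to the author's persona and supplies the summary,
# LinkedIn text, and CV link as its only grounding context.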
system_prompt = (
    f"You are acting as {name}. You are answering questions on my personal website, "
    f"particularly questions related to my career, background, skills, and experience. "
    f"Your responsibility is to represent me as faithfully as possible. "
    f"You are given a summary of my background and my LinkedIn profile to use for answering questions. "
    f"Be professional, friendly, and engaging, as if you are talking to a potential client or future employer. "
    f"If you don't know the answer based on the provided context, it's better to say so than to invent information. "
    f"Always stay in character as {name}. Answer in the same language as the user's question without disclosing any private information."
    f"\n\n## My Summary:\n{summary}\n\n## My LinkedIn Profile Text:\n{linkedin_text}\n\n## My CV:\n{cv_link}"
)
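
# ChatInterface calls this function with the latest user message plus the running
# conversation history and expects the assistant's reply as a plain string.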
def chat(message, history):
    """
    Handles the chat logic by formatting messages and calling the OpenAI API.
    """
    # history arrives as (user, assistant) pairs (Gradio's tuple-style history format);
    # convert it into the role/content messages the chat completions API expects.
    formatted_history = []
    for user_msg, assistant_msg in history:
        formatted_history.append({"role": "user", "content": user_msg})
        formatted_history.append({"role": "assistant", "content": assistant_msg})

    messages = [
        {"role": "system", "content": system_prompt},
        *formatted_history,
        {"role": "user", "content": message},
    ]

    try:
        response = client.chat.completions.create(
            model="openai/gpt-3.5-turbo",
            max_tokens=300,
            messages=messages,
            temperature=0.7,
        )
        return response.choices[0].message.content
    except Exception as e:
        print(f"An error occurred: {e}")
        return "Sorry, I encountered an error while processing your request. Please try again."
interface = gr.ChatInterface(
    fn=chat,
    title=f"Chat with {name}",
    description="Ask me about my experience, skills, or projects",
    examples=[
        ["What are your main technical skills?"],
        ["Tell me about your experience with AI and RAG chatbots."],
        ["¿Hablas español?"],
    ],
)
if __name__ == "__main__":
    interface.launch()