# Job Search MCP Server - Environment Variables Configuration
# Copy this file to .env and fill in your API keys
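#
# The server is assumed to load these values with python-dotenv at startup, e.g.:
#   from dotenv import load_dotenv
#   load_dotenv()  # reads .env from the working directory into os.environ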
# Required: LLM API keys (provide at least one)
OPENAI_API_KEY=your_openai_api_key_here
ANTHROPIC_API_KEY=your_anthropic_api_key_here
HF_ACCESS_TOKEN=your_huggingface_access_token_here
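# Only the key matching LLM_PROVIDER (set below) is likely needed;
# the other placeholders can be left as-is.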
# Optional: Job Search API Keys (for enhanced job data)
LINKEDIN_API_KEY=your_linkedin_api_key
INDEED_API_KEY=your_indeed_api_key
ADZUNA_APP_ID=your_adzuna_app_id
ADZUNA_APP_KEY=your_adzuna_app_key
ADZUNA_COUNTRY=gb # Two-letter country code used by the Adzuna API (e.g. gb, us, de)
# LLM Configuration
LLM_PROVIDER=huggingface # Options: openai, anthropic, huggingface
LLM_MODEL=deepseek/deepseek-v3-turbo # Model ID for the selected provider
HF_INFERENCE_PROVIDER=novita # HF Inference provider: novita, together, fireworks
MAX_TOKENS=300 # Maximum tokens per LLM response
TEMPERATURE=0.7 # Sampling temperature (higher = more varied output)
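# Example: switching to a hosted OpenAI model instead of Hugging Face Inference
# (illustrative values; use whatever model your account can access):
# LLM_PROVIDER=openai
# LLM_MODEL=gpt-4o-mini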
# Embedding Model (GPU processing)
EMBEDDING_MODEL=all-MiniLM-L6-v2
EMBEDDING_DIMENSION=384
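# EMBEDDING_DIMENSION must match the output size of EMBEDDING_MODEL;
# sentence-transformers' all-MiniLM-L6-v2 produces 384-dimensional vectors.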
# Application Settings
APP_NAME=Job Search MCP Server
DEBUG=false
HOST=127.0.0.1
PORT=7860
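# 7860 is the default Gradio port. When deploying on Hugging Face Spaces the app
# must listen on all interfaces, so you would typically override:
# HOST=0.0.0.0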
# Data Storage Paths
PROFILES_DB_PATH=./data/profiles.json
JOBS_CACHE_PATH=./data/jobs_cache.json
EMBEDDINGS_CACHE_PATH=./data/embeddings.faiss
# Search Configuration
MAX_JOBS_PER_SEARCH=50
SIMILARITY_THRESHOLD=0.7
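# SIMILARITY_THRESHOLD is assumed to be a cosine-similarity cutoff in [0, 1];
# matches scoring below it are filtered out, so raise it for stricter results.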
# External APIs
REMOTIVE_API_URL=https://remotive.com/api/remote-jobs
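# The Remotive remote-jobs endpoint is public and appears to require no API key.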