# daniielyan's picture
# 🔧 Update main.py to change MCP server flag, add Hugging Face dependencies in pyproject.toml, and enhance LLM service with Hugging Face integration. Add new job listings and user profiles in JSON data files.
# 4a5b92f
"""Settings and configuration for the Job Search MCP Server."""
import os
from functools import lru_cache
from typing import Optional

from dotenv import load_dotenv
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
# Load environment variables from .env into the process environment at import
# time, so anything reading os.environ directly (not just pydantic-settings)
# sees the same values.
load_dotenv()
class Settings(BaseSettings):
    """Application settings and configuration.

    Values are read from the process environment (and the ``.env`` file).
    Each field maps case-insensitively to the environment variable of the
    same name — e.g. ``openai_api_key`` <- ``OPENAI_API_KEY`` — which is the
    pydantic-settings default resolution rule.
    """

    # pydantic v2 configuration (replaces the deprecated inner ``Config`` class).
    # NOTE: the per-field ``env=`` kwarg used previously is a deprecated extra
    # kwarg in pydantic v2 and is ignored by Field; resolution only worked via
    # the case-insensitive field-name fallback, which this relies on directly.
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")

    # API keys (all optional — the corresponding provider is unavailable when unset)
    openai_api_key: Optional[str] = None
    anthropic_api_key: Optional[str] = None
    hf_access_token: Optional[str] = None

    # Job search APIs
    linkedin_api_key: Optional[str] = None
    indeed_api_key: Optional[str] = None
    remotive_api_url: str = "https://remotive.com/api/remote-jobs"
    adzuna_app_id: Optional[str] = None
    adzuna_app_key: Optional[str] = None
    adzuna_country: str = "gb"  # country code segment of the Adzuna API URL

    # Embedding model settings
    embedding_model: str = "all-MiniLM-L6-v2"
    # Vector size; presumably must match the chosen embedding model — TODO confirm
    embedding_dimension: int = 384

    # LLM settings
    llm_provider: str = "huggingface"  # one of: openai, anthropic, huggingface
    llm_model: str = "deepseek/deepseek-v3-turbo"
    hf_inference_provider: str = "novita"  # novita, together, fireworks, etc.
    max_tokens: int = 300
    temperature: float = 0.7

    # Application settings
    app_name: str = "Job Search MCP Server"
    debug: bool = False
    host: str = "127.0.0.1"
    port: int = 7860

    # Storage settings
    profiles_db_path: str = "./data/profiles.json"
    jobs_cache_path: str = "./data/jobs_cache.json"
    embeddings_cache_path: str = "./data/embeddings.faiss"

    # Search settings
    max_jobs_per_search: int = 50
    similarity_threshold: float = 0.7  # minimum similarity score to count as a match
@lru_cache()
def get_settings() -> Settings:
    """Return the application :class:`Settings`, constructed once and cached.

    ``lru_cache`` makes this a lazy singleton: the environment is read the
    first time this is called and the same instance is returned afterwards.

    Returns:
        Settings: the cached settings instance.
    """
    settings = Settings()
    # Report whether HF credentials were found WITHOUT echoing the secret.
    # (The previous debug print showed token[:20] + token[-10:], i.e. nearly
    # the entire token — a credential leak in logs.)
    if settings.hf_access_token:
        print(f"🔑 HF Access Token loaded: {settings.hf_access_token[:4]}… (masked)")
    else:
        print("❌ No HF Access Token found in environment variables")
    return settings