"""Settings and configuration for the Job Search MCP Server."""

from functools import lru_cache
from typing import Optional
from dotenv import load_dotenv

from pydantic import Field
from pydantic_settings import BaseSettings

# Load environment variables from .env file
load_dotenv()


class Settings(BaseSettings):
    """Application settings and configuration."""

    # API Keys
    openai_api_key: Optional[str] = Field(default=None, env="OPENAI_API_KEY")
    anthropic_api_key: Optional[str] = Field(default=None, env="ANTHROPIC_API_KEY")
    hf_access_token: Optional[str] = Field(default=None, env="HF_ACCESS_TOKEN")

    # Job Search APIs
    linkedin_api_key: Optional[str] = Field(default=None, env="LINKEDIN_API_KEY")
    indeed_api_key: Optional[str] = Field(default=None, env="INDEED_API_KEY")
    remotive_api_url: str = Field(
        default="https://remotive.com/api/remote-jobs", env="REMOTIVE_API_URL"
    )
    adzuna_app_id: Optional[str] = Field(default=None, env="ADZUNA_APP_ID")
    adzuna_app_key: Optional[str] = Field(default=None, env="ADZUNA_APP_KEY")
    adzuna_country: str = Field(default="gb", env="ADZUNA_COUNTRY")

    # Embedding Model Settings
    embedding_model: str = Field(default="all-MiniLM-L6-v2", env="EMBEDDING_MODEL")
    embedding_dimension: int = Field(default=384, env="EMBEDDING_DIMENSION")

    # LLM Settings
    llm_provider: str = Field(
        default="huggingface", env="LLM_PROVIDER"
    )  # openai, anthropic, huggingface
    llm_model: str = Field(default="deepseek/deepseek-v3-turbo", env="LLM_MODEL")
    hf_inference_provider: str = Field(
        default="novita", env="HF_INFERENCE_PROVIDER"
    )  # novita, together, fireworks, etc.
    max_tokens: int = Field(default=300, env="MAX_TOKENS")
    temperature: float = Field(default=0.7, env="TEMPERATURE")

    # Application Settings
    app_name: str = Field(default="Job Search MCP Server", env="APP_NAME")
    debug: bool = Field(default=False, env="DEBUG")
    host: str = Field(default="127.0.0.1", env="HOST")
    port: int = Field(default=7860, env="PORT")

    # Storage Settings
    profiles_db_path: str = Field(
        default="./data/profiles.json", env="PROFILES_DB_PATH"
    )
    jobs_cache_path: str = Field(
        default="./data/jobs_cache.json", env="JOBS_CACHE_PATH"
    )
    embeddings_cache_path: str = Field(
        default="./data/embeddings.faiss", env="EMBEDDINGS_CACHE_PATH"
    )

    # Search Settings
    max_jobs_per_search: int = Field(default=50, env="MAX_JOBS_PER_SEARCH")
    similarity_threshold: float = Field(default=0.7, env="SIMILARITY_THRESHOLD")

    class Config:
        """Pydantic config."""

        env_file = ".env"
        env_file_encoding = "utf-8"


@lru_cache()
def get_settings() -> Settings:
    """Get cached settings instance."""
    settings = Settings()

    # Debug print the HF token
    if settings.hf_access_token:
        print(
            f"πŸ”‘ HF Access Token loaded: {settings.hf_access_token[:20]}...{settings.hf_access_token[-10:]}"
        )
    else:
        print("❌ No HF Access Token found in environment variables")

    return settings
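
# Example usage (illustrative sketch, not part of the original module; the
# "config" import path below is an assumption about the project layout):
#
#     from config import get_settings
#
#     settings = get_settings()
#     if settings.llm_provider == "huggingface":
#         model = settings.llm_model                 # defaults to "deepseek/deepseek-v3-turbo"
#         provider = settings.hf_inference_provider  # defaults to "novita"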