# llm_topic_modelling / pyproject.toml

[project]
name = "Large language model topic modelling"
version = "0.3.0"
description = "Topic model open text data files with a large language model."
requires-python = ">=3.10"
readme = "README.md"
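# Runtime dependencies, pinned to exact versions.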
dependencies = [
    "pandas==2.3.3",
    "gradio==5.49.1",
    "transformers==4.56.0",
    "spaces==0.42.1",
    "boto3==1.40.48",
    "pyarrow==21.0.0",
    "openpyxl==3.1.5",
    "markdown==3.7",
    "tabulate==0.9.0",
    "lxml==5.3.0",
    "google-genai==1.33.0",
    "openai==2.2.0",
    "html5lib==1.1",
    "beautifulsoup4==4.12.3",
    "rapidfuzz==3.13.0",
    "python-dotenv==1.1.0"
]

[project.urls]
Homepage = "https://github.com/seanpedrick-case/llm_topic_modelling"
Repository = "https://github.com/seanpedrick-case/llm_topic_modelling"

[project.optional-dependencies]
dev = ["pytest"]
test = ["pytest", "pytest-cov"]
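
# Example usage (a sketch, not prescribed by this file): install the package and its
# optional extras from the repository root with
#   pip install -e ".[dev,test]"
# then run the test suite with coverage via
#   pytest --cov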

# Configuration for the Ruff linter:
[tool.ruff]
line-length = 88

[tool.ruff.lint]
select = ["E", "F", "I"]
ignore = [
    "E501", # line-too-long (handled by Black)
    "E402", # module-import-not-at-top-of-file (sometimes needed for conditional imports)
]

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"] # Allow unused imports in __init__.py

# Configuration for the Black formatter:
[tool.black]
line-length = 88
target-version = ['py310']
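
# Example usage (an assumption, not mandated by this file): format the codebase with
#   black .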