from transformers import pipeline

# Load the Hugging Face pipeline for the requested task
def load_model(task="summarization", model_name="facebook/bart-large-cnn", framework="pt"):
    """
    Load the specified task model using Hugging Face's pipeline.
    Default is PyTorch ('pt') as the framework.
    Note: the default checkpoint is a summarization model; pass a
    question-answering checkpoint via `model_name` when
    task="question-answering".
    """
    model = pipeline(task=task, model=model_name, framework=framework)
    return model
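# Usage sketch (the QA checkpoint below is an assumption, not part of the
# original app; any extractive-QA model from the Hub would work):
#   summarizer = load_model(task="summarization")
#   qa_model = load_model(task="question-answering",
#                         model_name="deepset/roberta-base-squad2")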
# Summarization function
def summarize_text(model, text):
    """
    Summarize the provided legal text.
    """
    if not text.strip():
        return "Please provide input text."
    result = model(text, max_length=150, min_length=40, do_sample=False)
    return result[0]['summary_text']
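# Note on the return shape unpacked above: the summarization pipeline returns
# a list of dicts, e.g. [{'summary_text': '...'}], so the summary string is
# taken from the first element's 'summary_text' field.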
# Question Answering function
def answer_question(model, question, context):
    """
    Answer a question based on the provided legal context.
    """
    if not context.strip() or not question.strip():
        return "Please provide both a context and a question."
    result = model(question=question, context=context)
    return result['answer']
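# Minimal end-to-end sketch for running this file directly. The QA checkpoint
# and the sample clause are illustrative assumptions, not part of the app.
if __name__ == "__main__":
    summarizer = load_model(task="summarization")
    qa_model = load_model(task="question-answering",
                          model_name="deepset/roberta-base-squad2")

    clause = (
        "The Tenant shall pay rent of $2,000 per month, due on the first day "
        "of each month. Late payments incur a fee of five percent of the "
        "amount due. The lease term is twelve months, beginning January 1, "
        "and renews automatically unless either party gives sixty days' "
        "written notice of termination."
    )

    # Summarize the clause, then ask a question against the same text.
    print(summarize_text(summarizer, clause))
    print(answer_question(qa_model, "How much is the late fee?", clause))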