import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Load the Granite model once and cache it across Streamlit reruns
@st.cache_resource
def load_pipeline():
    tokenizer = AutoTokenizer.from_pretrained("ibm-granite/granite-3.3-8b-instruct")
    model = AutoModelForCausalLM.from_pretrained("ibm-granite/granite-3.3-8b-instruct")
    return pipeline("text-generation", model=model, tokenizer=tokenizer)
st.title("🏙️ Sustainable Smart City Assistant")
st.write("Ask sustainability questions")
qa_pipeline = load_pipeline()
query = st.text_input("Enter your question")
if query:
    with st.spinner("Thinking..."):
        resp = qa_pipeline(query, max_new_tokens=200)[0]["generated_text"]
    st.success(resp)
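
# To run locally (assumes streamlit, transformers, and torch are installed,
# and that the machine has enough memory for the 8B Granite model):
#   streamlit run app.py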