import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

MODEL_ID = "ibm-granite/granite-3.3-8b-instruct"

@st.cache_resource  # load the model once, not on every Streamlit rerun
def load_pipeline():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
    # Reuse the already-loaded model and tokenizer instead of downloading them a second time
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

qa_pipeline = load_pipeline()

st.title("🏙️ Sustainable Smart City Assistant")
st.write("Ask sustainability questions")

query = st.text_input("Enter your question")
if query:
    with st.spinner("Thinking..."):
        resp = qa_pipeline(query, max_new_tokens=200)[0]["generated_text"]
        st.success(resp)
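
To try the assistant locally, save the script (the filename app.py below is just an example) and launch it with the Streamlit CLI:

streamlit run app.py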