import chainlit as cl
from chainlit.input_widget import Switch
import google.generativeai as gemini_client
from qdrant_client import QdrantClient
import vanna
from vanna.vannadb import VannaDB_VectorStore
from vanna.google import GoogleGeminiChat
import textwrap
import os

# --- API keys / configuration, all read from the environment ---
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")
QDRANT_URL = os.environ.get("QDRANT_URL")
QDRANT_API_KEY = os.environ.get("QDRANT_API_KEY")
VANNA_MODEL = os.environ.get("VANNA_MODEL")
VANNA_API_KEY = os.environ.get("VANNA_API_KEY")

# --- Connections ---
# Qdrant vector store: used by the "Infobot LAN" profile for semantic search
# over the 'lan_knowledge' collection.
client = QdrantClient(
    url=QDRANT_URL,
    api_key=QDRANT_API_KEY,
)
# Configure the Gemini SDK once at module load; used for embeddings and chat.
gemini_client.configure(api_key=GEMINI_API_KEY)


class MyVanna(VannaDB_VectorStore, GoogleGeminiChat):
    """Text-to-SQL agent: VannaDB vector store for retrieval, Gemini for generation.

    Used by the "Databot LAN" chat profile to translate natural-language
    questions into SQL, run them against Postgres, and summarize the result.
    """

    def __init__(self, config=None):
        # Initialize both parents explicitly; they take different configs.
        # (Original assigned an unused local MY_VANNA_MODEL — removed.)
        VannaDB_VectorStore.__init__(
            self,
            vanna_model=VANNA_MODEL,
            vanna_api_key=VANNA_API_KEY,
            config=config,
        )
        GoogleGeminiChat.__init__(
            self,
            config={
                "api_key": GEMINI_API_KEY,
                "model": "gemini-1.5-flash",
                "language": "id-ID",
            },
        )

    # Error handling for session disconnection
    async def handle_request(self, scope, receive, send):
        """Best-effort guard so a disconnected session does not crash a request.

        NOTE(review): the original body referenced an undefined name ``sid``
        and a method ``self._get_socket`` that neither parent class is shown
        to define, so any call raised NameError. The lookup below is guarded
        defensively; confirm the intended socket-retrieval API before relying
        on this handler.
        """
        # presumably the session id travels in the ASGI scope — TODO confirm
        sid = scope.get("sid") if isinstance(scope, dict) else None
        try:
            socket = self._get_socket(sid)
        except (AttributeError, KeyError):
            print("Session is disconnected")
            return
        # Continue with the rest of the request handling


vn = MyVanna()


@cl.set_chat_profiles
async def chat_profile():
    """Declare the two Chainlit chat profiles with their starter prompts."""
    return [
        cl.ChatProfile(
            name="Databot LAN",
            markdown_description="Temukan data statistik sektoral LANRI dengan bantuan AI",
            icon="/public/terminal.svg",
            starters=[
                cl.Starter(
                    label="JF AK menurut jenis kelamin?",
                    message="jumlah jabatan fungsional analis kebijakan menurut jenis kelamin?",
                    icon="/public/idea.svg",
                ),
                cl.Starter(
                    label="Alumni Latsar CPNS tahun 2023?",
                    message="jumlah alumni Pelatihan Dasar CPNS tahun 2023 menurut asal instansi?",
                    icon="/public/learn.svg",
                ),
                cl.Starter(
                    label="PPPK mengakses Swajar Orientasi?",
                    message="jumlah PPPK yang mengakses swajar orientasi PPPK berdasarkan nama jabatan?",
                    icon="/public/terminal.svg",
                ),
                cl.Starter(
                    label="Alumni PKN I per tahun?",
                    message="jumlah alumni PKN I berdasarkan tahun diklat?",
                    icon="/public/write.svg",
                ),
            ],
        ),
        cl.ChatProfile(
            name="Infobot LAN",
            markdown_description="Cari tahu informasi umum mengenai LAN RI dengan bantuan AI",
            icon="/public/write.svg",
            starters=[
                cl.Starter(
                    label="Apa itu Bangkom ASN?",
                    message="Apa yang dimaksud dengan Pengembangan Kompetensi?",
                    icon="/public/terminal.svg",
                ),
                cl.Starter(
                    label="Minimal JP Bangkom ASN?",
                    message="Berapa minimal jam pelajaran pengembangan kompetensi bagi PNS dalam setahun?",
                    icon="/public/idea.svg",
                ),
                cl.Starter(
                    label="Masa prajabatan CPNS?",
                    message="Berapa lama masa prajabatan bagi seorang CPNS?",
                    icon="/public/learn.svg",
                ),
                cl.Starter(
                    label="Berapa jam pelajaran PKN I?",
                    message="Berapa jam pelajaran yang didapatkan ketika seorang ASN mengikuti pelatihan kepemimpinan nasional tingkat 1?",
                    icon="/public/write.svg",
                ),
            ],
        ),
    ]


@cl.on_message
async def main(message: cl.Message):
    """Route an incoming message to the active chat profile.

    * "Databot LAN": NL question -> SQL (Vanna) -> Postgres -> summary
      (+ optional Plotly chart, toggled via a chat setting).
    * "Infobot LAN": embed the question, retrieve the best passage from
      Qdrant, and answer via Gemini with a RAG prompt.
    """
    if cl.user_session.get("chat_profile") == "Databot LAN":
        human_query = message.content
        # Connection parameters come from the environment on every message;
        # vn keeps the connection for the subsequent run_sql calls.
        vn.connect_to_postgres(
            host=os.environ.get("DB_HOST"),
            dbname=os.environ.get("DB_NAME"),
            user=os.environ.get("DB_USER"),
            password=os.environ.get("DB_PASS"),
            port=os.environ.get("DB_PORT"),
        )

        def gen_query(human_query: str):
            # Translate the natural-language question into SQL.
            sql_query = vn.generate_sql(human_query)
            return sql_query

        def execute_query(query):
            # Run the SQL against Postgres; returns a DataFrame.
            df = vn.run_sql(query)
            return df

        def human_answer(human_query, df):
            # Summarize the result set in natural language.
            answer = vn.generate_summary(human_query, df)
            return answer

        def plot(human_query, sql, df):
            # Generate Plotly code for the result and wrap it for Chainlit.
            plotly_code = vn.generate_plotly_code(question=human_query, sql=sql, df=df)
            fig = vn.get_plotly_figure(plotly_code=plotly_code, df=df)
            elements = [cl.Plotly(name="chart", figure=fig, display="inline", root=False)]
            return elements

        # Per-message settings panel: chart on/off switch.
        settings = await cl.ChatSettings(
            [
                Switch(id="is_chart", label="Tampilkan grafik", initial=False),
            ]
        ).send()

        sql_query = gen_query(human_query)
        df = execute_query(sql_query)
        answer_human = human_answer(human_query, df)
        if settings["is_chart"]:
            elements = plot(human_query, sql_query, df)
            await cl.Message(
                content=human_query, elements=elements, author="Databot LANRI"
            ).send()
        await cl.Message(content=f"{answer_human}").send()

    elif cl.user_session.get("chat_profile") == "Infobot LAN":
        ask = message.content
        # Embed the question and retrieve the two closest passages.
        search_result = client.search(
            collection_name="lan_knowledge",
            query_vector=gemini_client.embed_content(
                model="models/embedding-001",
                content=ask,
                task_type="retrieval_query",
            )["embedding"],
            limit=2,
        )

        # Robustness fix: the original indexed search_result[0] unconditionally
        # and raised IndexError when the collection returned no hits.
        if not search_result:
            await cl.Message(
                content=(
                    "Maaf, sementara Infobot LANRI belum dapat menjawab pertanyaan "
                    "tersebut. Silakan mengajukan pertanyaan lainnya."
                )
            ).send()
            return

        def make_prompt(question, relevant_passage, data_source):
            # Strip quotes/newlines so the passage embeds cleanly in the prompt.
            escaped = relevant_passage.replace("'", "").replace('"', "").replace("\n", " ")
            prompt = textwrap.dedent("""You are a helpful and informative bot that answers questions using text from the reference passage included below. \
            Be sure to respond in a complete sentence, being comprehensive, including all relevant background information. \
            However, you are talking to a non-technical audience, so be sure to break down complicated concepts and \
            strike a friendly and converstional tone. \
            If the passage is irrelevant to the answer, you may cut or ignore it. \
            Include the data source at the end of the answer, separate by enter \
            QUESTION: '{question}'
            PASSAGE: '{relevant_passage}'
            Sumber Data: {data_source}

            ANSWER: (answer with Bahasa Indonesia) if the passage irrelevant, answer: Maaf, sementara Infobot LANRI belum dapat menjawab pertanyaan tersebut. Silakan mengajukan pertanyaan lainnya.
            """).format(question=question, relevant_passage=escaped, data_source=data_source)
            return prompt

        prompt = make_prompt(
            question=ask,
            relevant_passage=search_result[0].payload["answer"],
            data_source=search_result[0].payload["pasal"]
            + " "
            + search_result[0].payload["peraturan"],
        )
        model = gemini_client.GenerativeModel("models/gemini-1.5-flash")
        answer = model.generate_content(prompt)
        await cl.Message(content=f"{answer.text}").send()