import os
#import json
#import pandas as pd
import gradio as gr
'''
from llama_index.core import (
    VectorStoreIndex,
    download_loader,
    StorageContext
)
'''
#import logging
from dotenv import load_dotenv, find_dotenv
from pathlib import Path
# from llama_index.llms.mistralai import MistralAI
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage
# from llama_index.embeddings.mistralai import MistralAIEmbedding
from src.utils_fct import *
TITLE = "RIZOA-AUCHAN Chatbot Demo"
DESCRIPTION = "Example of an assistant built with Gradio, combining function calling and Mistral AI via its API"
PLACEHOLDER = (
    "Vous pouvez me poser une question, appuyer sur Entrée pour valider"
)
EXAMPLES = ["Comment fait-on pour produire du maïs ?", "Rédige-moi une lettre pour faire un stage dans une exploitation agricole", "Comment reprendre une exploitation agricole ?"]
MODEL = "mistral-large-latest"
# FILE = Path(__file__).resolve()
# BASE_PATH = FILE.parents[0]
load_dotenv()
ENV_API_KEY = os.environ.get("MISTRAL_API_KEY")
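# The key is read from the environment; with python-dotenv it can also live in a
# local .env file next to this script, e.g. (illustrative value only):
# MISTRAL_API_KEY=your-api-key-here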
# HISTORY = pd.read_csv(os.path.join(BASE_PATH, "data/cereal_price.csv"), encoding="latin-1")
# HISTORY = HISTORY[[HISTORY["memberStateName"]=="France"]]
# HISTORY['price'] = HISTORY['price'].str.replace(",", ".").astype('float64')
# Define LLMs
CLIENT = MistralClient(api_key=ENV_API_KEY)
# EMBED_MODEL = MistralAIEmbedding(model_name="mistral-embed", api_key=ENV_API_KEY)
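# Illustrative only (not used by the app): a direct call to the chat endpoint,
# with the client and model defined above, would look like:
# CLIENT.chat(
#     model=MODEL,
#     messages=[ChatMessage(role="user", content="Bonjour")]
# ).choices[0].message.content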
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            '''
            gr.Image(value= os.path.join(BASE_PATH, "img/logo_rizoa_auchan.jpg"),#".\img\logo_rizoa_auchan.jpg",
                height=250,
                width=250,
                container=False,
                show_download_button=False
            )
            '''
            gr.HTML(
                value='<img src="https://huggingface.co/spaces/rizoa-auchan-hack/hack/resolve/main/logo_rizoa_auchan.jpg">'
            )
        with gr.Column(scale=4):
            gr.Markdown(
                """
                # Bienvenue au Chatbot FAIR-PLAI

                Ce chatbot est un assistant numérique, médiateur des vendeurs-acheteurs
                """
            )
    gr.Markdown(f""" ### {DESCRIPTION} """)

    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder=PLACEHOLDER)
    clear = gr.ClearButton([msg, chatbot])
    def respond(message, chat_history):
        messages = [ChatMessage(role="user", content=message)]
        response = forecast(messages)
        chat_history.append((message, str(response)))
        # final_response = CLIENT.chat(
        #     model=MODEL,
        #     messages=prompt
        # ).choices[0].message.content
        # return [[message, None],
        #         [None, str(response)]
        #        ]
        return "", chat_history
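    # `forecast` is imported from src.utils_fct, which is not part of this file.
    # A minimal sketch of what such a helper might do, assuming it simply relays
    # the messages to the Mistral chat endpoint (hypothetical, for illustration):
    #
    # def forecast(messages):
    #     return CLIENT.chat(
    #         model=MODEL,
    #         messages=messages
    #     ).choices[0].message.content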
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
# demo.title = TITLE
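# To run locally (assuming gradio, mistralai and python-dotenv are installed
# and src/utils_fct.py is present), execute this script with Python.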
if __name__ == "__main__":
    demo.launch()