import gradio as gr
import pandas as pd
from .email_reader import EmailReader
#from langchain.vectorstores import Chroma
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
#from langchain_huggingface import HuggingFaceEmbeddings
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
import datetime
import os
from langchain_community.chat_models import ChatOllama
from config import *
from transformers import pipeline
import logging
import re
from openai import OpenAI
import json
# Path to the Excel file where fetched emails are stored
Data_path = os.path.join('Email_Data', 'emails.xlsx')
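# Expected columns in emails.xlsx, inferred from how the DataFrame is used below
# (adjust if your EmailReader writes different headers):
# 'From', 'Subject', 'Body', 'Message ID'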
class EmailResponder:
"""Class to handle email responses and sentiment analysis."""
def __init__(self):
"""Initialize the EmailResponder object."""
try:
self.classifier = pipeline("zero-shot-classification", model=ZERO_SHOT_MODEL)
self.text_labels = ['Positive', 'Negative', 'Neutral']
self.template = template
            # Embedding model for the (currently disabled) Chroma vector store
            model_kwargs = {'device': 'cpu'}
            encode_kwargs = {'normalize_embeddings': False}
            self.embed_model = HuggingFaceEmbeddings(
                model_name=EMBED_MODEL_NAME,
                model_kwargs=model_kwargs,
                encode_kwargs=encode_kwargs
            )
self.DB_PATH = DB_PATH
#self.vectorstore = Chroma(persist_directory=self.DB_PATH, embedding_function=self.embed_model)
#self.retriever = self.vectorstore.as_retriever()
self.prompt = ChatPromptTemplate.from_template(self.template)
self.ollama_llm = OLLAMA_MODEL
self.model_local = ChatOllama(model=self.ollama_llm)
'''
self.chain = (
{"context": self.retriever, "question": RunnablePassthrough()}
| self.prompt
| self.model_local
| StrOutputParser()
)
'''
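            # Assumed layout of emailCategories.json, inferred from the accesses in
            # get_GPTcompletion below (keys and descriptions are illustrative):
            # {
            #   "openai": "<OpenAI API key>",
            #   "categories": [
            #     {"cat1": "<description of the first category>"},
            #     {"cat2": "<description of the second category>"}
            #   ]
            # }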
with open('emailCategories.json') as user_file:
self.jasonFile = json.load(user_file)
except Exception as e:
logging.error(f"Error initializing EmailResponder: {e}")
raise
def generate_response(self, body, subject):
"""Generate a response based on sentiment analysis and a pre-defined model chain.
Args:
body (str): The body of the email.
subject (str): The subject of the email.
Returns:
Tuple[str, float, str]: A tuple containing sentiment label, sentiment score, and the generated reply.
"""
try:
            # Run zero-shot sentiment classification on the email body
            result = self.classifier(body, self.text_labels, multi_label=False)
            sentiment_label = result['labels'][0]
            sentiment_score = result['scores'][0]
            today = datetime.date.today()
            # Query assembled for the (currently disabled) retrieval chain
            query = f"Today's date - {today}\n sentiment - {sentiment_label}\n Subject - {subject}\n Body - {body}"
            #reply_body = self.chain.invoke(query)
            reply_body = ""
            orderNum = self.getOrderNumber(body, subject)
            if orderNum != 0xFFFF:
                self.getShopifyInfo(orderNum)
                reply_body = self.get_GPTcompletion(body, orderNum)
            return sentiment_label, sentiment_score, reply_body
except Exception as e:
logging.error(f"Error generating response: {e}")
raise
    def getOrderNumber(self, body, subject):
        """Extract an order number from the email body or, failing that, the subject.

        Returns the first numeric token that follows the word 'order', or the
        sentinel 0xFFFF if no order number is found.
        """
        Inbody = (body
                  .replace("#", " ")
                  .replace("number", " ")
                  .lower()
                  .split())
        Insubject = (subject
                     .replace("#", " ")
                     .replace("number", " ")
                     .lower()
                     .split())
        ordNum = 0xFFFF
        for r in range(len(Inbody) - 1):
            if Inbody[r] == "order":
                indices = [i for i, item in enumerate(Inbody[r:]) if item.isnumeric()]
                if len(indices) != 0:
                    ordNum = Inbody[r + indices[0]]
                    logging.debug(f"Order number found in body: {ordNum}")
                    break
        if ordNum == 0xFFFF:
            for r in range(len(Insubject) - 1):
                if Insubject[r] == "order":
                    indices = [i for i, item in enumerate(Insubject[r:]) if item.isnumeric()]
                    if len(indices) != 0:
                        ordNum = Insubject[r + indices[0]]
                        logging.debug(f"Order number found in subject: {ordNum}")
                        break
        return ordNum
    def getShopifyInfo(self, orderNum):
        """Fetch order details from Shopify for the given order number (not yet implemented)."""
        pass
    def get_GPTcompletion(self, prompt, orderNum, model="gpt-4-1106-preview"):
        """Categorize the email text with OpenAI and draft a reply when an order number is present."""
        client = OpenAI(api_key=self.jasonFile["openai"])
        GptInstruction = f"""categorize this text ' {prompt} ' as follows:
        if it belongs to: {self.jasonFile["categories"][0]["cat1"]} then just say {list(self.jasonFile["categories"][0].keys())[0]} or
        if it belongs to: {self.jasonFile["categories"][1]["cat2"]} then generate a response using this order number {orderNum}
        otherwise just say not found """
        messages = [{"role": "user", "content": GptInstruction}]
        response = client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=0.7,
        )
        return response.choices[0].message.content
class EmailProcessor(EmailResponder):
"""Class to process emails and manage email-related tasks."""
def __init__(self):
"""Initialize the EmailProcessor object."""
super().__init__()
def fetch_and_save_emails(self, email_user, email_pass):
"""Fetch unseen emails and save them to an Excel file.
Args:
email_user (str): Email username.
email_pass (str): Email password.
Returns:
str: Success message or error message.
"""
try:
reader = EmailReader('imap-mail.outlook.com', email_user, email_pass)
reader.connect()
reader.login()
reader.fetch_unseen_emails()
reader.save_emails_to_excel(Data_path)
return "Emails fetched and saved to 'emails.xlsx'"
except Exception as e:
logging.error(f"Error fetching and saving emails: {e}")
raise
def load_emails(self):
"""Load emails from the Excel file.
Returns:
Tuple[str, str, str, int]: A tuple containing sender, subject, body, and email index.
"""
try:
df = pd.read_excel(Data_path)
if not df.empty:
return self.update_email_content(df, 0)
return "N/A", "N/A", "N/A", 0
except Exception as e:
logging.error(f"Error loading emails: {e}")
raise
def send_reply_and_move_next(self, email_user, email_pass, index, reply_body):
"""Send a reply to the current email and move to the next one.
Args:
email_user (str): Email username.
email_pass (str): Email password.
index (int): Current email index.
reply_body (str): Reply body.
Returns:
Tuple[str, str, str, str, int, str, str, str]: A tuple containing response message, sender, subject, body, index,
and empty reply and sentiment fields.
"""
try:
df = pd.read_excel(Data_path)
if 0 <= index < len(df):
# Retrieve the message ID of the current email
msg_id = df.iloc[index]['Message ID'] # Replace 'Message ID' with the actual column name for message IDs in your DataFrame
reader = EmailReader('imap-mail.outlook.com', email_user, email_pass)
reader.connect()
reader.login()
send_status = reader.reply_to_email(msg_id, reply_body)
reader.close_connection()
response_message = send_status if send_status else "Reply sent successfully!"
From, Subject, Body, index = self.update_email_content(df, index)
# Clear reply body and sentiment fields
return response_message, From, Subject, Body, index, "", "", ""
else:
return "Invalid email index.", "", "", "", index, "", "", ""
except Exception as e:
logging.error(f"Error sending reply and moving next: {e}")
raise
def update_email_content(self, df, index):
"""Update email content based on the index.
Args:
df (pd.DataFrame): DataFrame containing email data.
index (int): Email index.
Returns:
Tuple[str, str, str, int]: A tuple containing sender, subject, body, and email index.
"""
try:
if 0 <= index < len(df):
email = df.iloc[index]
return email["From"], email["Subject"], str(email["Body"]), index
return "N/A", "N/A", "N/A", index
except Exception as e:
logging.error(f"Error updating email content: {e}")
raise
def navigate_emails(self, direction, index):
"""Navigate through emails based on the given direction.
Args:
direction (str): Navigation direction ('next' or 'prev').
index (int): Current email index.
Returns:
Tuple[str, str, str, int]: A tuple containing sender, subject, body, and email index.
"""
try:
df = pd.read_excel(Data_path)
if direction == "next":
index = index + 1 if index < len(df) - 1 else index
elif direction == "prev":
index = index - 1 if index > 0 else index
return self.update_email_content(df, index)
except Exception as e:
logging.error(f"Error navigating emails: {e}")
raise
def show_popup(self, response_message):
"""Display a popup with the given response message.
Args:
response_message (str): Response message.
Returns:
gr.Info: Gradio Info object.
"""
try:
            if response_message:
                return gr.Info(response_message)
            return None
except Exception as e:
logging.error(f"Error showing popup: {e}")
raise
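
# Minimal local smoke test — a sketch, not part of the deployed Gradio app.
# Assumptions: the module is run with `python -m <package>.<module>` so the
# relative email_reader import resolves, and the credentials below are replaced
# with a real Outlook address and app password.
if __name__ == "__main__":
    processor = EmailProcessor()
    print(processor.fetch_and_save_emails("user@example.com", "app-password"))
    sender, subject, body, index = processor.load_emails()
    label, score, reply = processor.generate_response(body, subject)
    print(f"From: {sender}\nSubject: {subject}")
    print(f"Sentiment: {label} ({score:.2f})")
    print(f"Draft reply:\n{reply}")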