import gradio as gr
import edge_tts
from edge_tts import VoicesManager
import tempfile
import os
import re
import time
from huggingface_hub import InferenceClient
from streaming_stt_nemo import Model
# edge-tts female English voices, keyed by display label.
Female_language_dict = {
    'English-Jenny (Female)': 'en-US-JennyNeural',
    'English-Ana (Female)': 'en-US-AnaNeural',
    'English-Aria (Female)': 'en-US-AriaNeural',
    'English-Michelle (Female)': 'en-US-MichelleNeural',
    'English (Australia)-Natasha- (Female)': 'en-AU-NatashaNeural',
    'English (Canada)-Clara- (Female)': 'en-CA-ClaraNeural',
    'English (UK)-Libby- (Female)': 'en-GB-LibbyNeural',
    'English (UK)-Maisie- (Female)': 'en-GB-MaisieNeural',
    'English (UK)-Sonia- (Female)': 'en-GB-SoniaNeural',
    'English (Ireland)-Emily- (Female)': 'en-IE-EmilyNeural',
}
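# Minimal sketch (not wired into the app) of how a table like the one above
# could be built dynamically with edge-tts's VoicesManager instead of being
# hard-coded; the Gender/Language filter values follow edge-tts voice metadata.
async def list_female_english_voices():
    voices = await VoicesManager.create()
    return [v["ShortName"] for v in voices.find(Gender="Female", Language="en")]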
# Speech-to-text engine (NeMo-based streaming STT).
default_lang = "en"
engines = {default_lang: Model(default_lang)}

# Chat model served through the Hugging Face Inference Providers API.
modelo = "aifeifei798/DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored"
client = InferenceClient(
    provider="featherless-ai",
    api_key=os.environ["HF_TOKEN"],
)
system_instructions = (
    "<s>[SYSTEM] You are a technical writing expert. Provide detailed, high-quality responses, "
    "and be sure they remain clear and complete. Respond as Sonia, the Executive Assistant at "
    "RJP Development Studio. Your responses should be concise, clear, and friendly, embodying "
    "your intelligence, resourcefulness, and slightly bitchy and bossy yet friendly and sexy "
    "demeanor. You hold a PhD from Oxford University with the following majors: Aerospace "
    "Engineering, Law, Chartered Accountancy, Architecture, Chemistry, Medicine, Pharmacy, "
    "Psychology, Statistics, Nursing, Physics, Astrophysics, Biomedical Engineering, Astronomy, "
    "and Dentistry, which provide you with the expertise to assist effectively. You may bring a "
    "touch of London charm with a hint of slang to our conversations, ensuring they remain "
    "engaging and approachable. You are here to provide accurate information, answer questions, "
    "and offer guidance with a warm and professional tone. You will break down complex topics "
    "into easy-to-understand explanations. If you are unsure about something, you will say so "
    "and seek the necessary information rather than guessing. Use unbiased and diplomatic "
    "language to ensure clarity and respect. Our conversations will be concise, action-oriented, "
    "and free of grammatical errors. I look forward to assisting you, darling."
)
def clean_text(text):
    # Characters that read badly when spoken by TTS are replaced with spaces.
    # Note: "**" is listed before "*" so double asterisks are removed first.
    replacements = {
        "–": " ",   # en-dash
        "-": " ",   # hyphen
        "**": " ",  # Markdown bold markers
        "*": " ",   # Markdown emphasis marker
        "#": " ",   # Markdown heading marker
    }
    # Apply replacements.
    for old, new in replacements.items():
        text = text.replace(old, new)

    # Strip emojis across the common Unicode emoji blocks.
    emoji_pattern = re.compile(
        r'[\U0001F600-\U0001F64F]|'  # emoticons
        r'[\U0001F300-\U0001F5FF]|'  # miscellaneous symbols and pictographs
        r'[\U0001F680-\U0001F6FF]|'  # transport and map symbols
        r'[\U0001F700-\U0001F77F]|'  # alchemical symbols
        r'[\U0001F780-\U0001F7FF]|'  # geometric shapes extended
        r'[\U0001F800-\U0001F8FF]|'  # supplemental arrows-C
        r'[\U0001F900-\U0001F9FF]|'  # supplemental symbols and pictographs
        r'[\U0001FA00-\U0001FA6F]|'  # chess symbols
        r'[\U0001FA70-\U0001FAFF]|'  # symbols and pictographs extended-A
        r'[\U00002702-\U000027B0]|'  # dingbats
        r'[\U0001F1E0-\U0001F1FF]',  # flags (regional indicators)
        flags=re.UNICODE)
    text = emoji_pattern.sub('', text)

    # Collapse runs of whitespace (including line breaks) into single spaces.
    text = re.sub(r'\s+', ' ', text).strip()
    return text
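# Example of what clean_text does to typical model output:
#   clean_text("**Plan** – step 1")  ->  "Plan step 1"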
def transcribe(audio):
    # Transcribe the recorded file with the English STT engine.
    lang = "en"
    engine = engines[lang]
    text = engine.stt_file(audio)[0]
    return text
def generate_reply(text):
    # Shared chat helper used by both the voice and text tabs: ask the model
    # for a reply, then strip Markdown and emojis so it reads well over TTS.
    messages = [
        {"role": "system", "content": system_instructions},
        {"role": "user", "content": text + "[Hermione]"},
    ]
    response = client.chat.completions.create(
        model=modelo,
        messages=messages,
        temperature=0.6,
    )
    return clean_text(response.choices[0].message.content.strip())
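# Minimal sketch of a streaming variant (assuming the provider supports
# `stream=True`); the app itself uses the blocking call above:
#   for chunk in client.chat.completions.create(model=modelo, messages=messages, stream=True):
#       print(chunk.choices[0].delta.content or "", end="")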
async def respond(audio):
    # Voice tab: transcribe the user's speech, generate a reply, and speak it.
    user = transcribe(audio)
    reply = generate_reply(user)
    # Fall back to Sonia's voice if the label is ever missing from the table.
    voice = Female_language_dict.get("English (UK)-Sonia- (Female)", "en-GB-SoniaNeural")
    communicate = edge_tts.Communicate(reply, voice)
    # edge-tts writes MP3 audio, so label the temporary file accordingly.
    with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as tmp_file:
        tmp_path = tmp_file.name
    await communicate.save(tmp_path)
    yield reply, tmp_path
async def generate1(TextPrompt):
    # Text tab: generate a reply to the written prompt and speak it.
    TextOut = generate_reply(TextPrompt)
    voice = Female_language_dict.get("English (UK)-Sonia- (Female)", "en-GB-SoniaNeural")
    communicate = edge_tts.Communicate(TextOut, voice)
    with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as tmp_file:
        tmp_path = tmp_file.name
    await communicate.save(tmp_path)
    yield TextOut, tmp_path
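# The NamedTemporaryFile calls above use delete=False, so synthesized clips
# accumulate on disk. A minimal cleanup sketch (hypothetical helper, not
# called anywhere in this app) that removes clips older than an hour:
def cleanup_temp_audio(max_age_seconds=3600):
    tmp_dir = tempfile.gettempdir()
    now = time.time()
    for name in os.listdir(tmp_dir):
        path = os.path.join(tmp_dir, name)
        if name.endswith(".mp3") and now - os.path.getmtime(path) > max_age_seconds:
            try:
                os.remove(path)
            except OSError:
                pass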
with gr.Blocks(theme=gr.themes.Glass(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo:
    gr.HTML(
        """ <img src='https://huggingface.co/spaces/Isidorophp/Executive-Assistant/resolve/main/logo.png' alt='RJP Development STUDIO' style='height:85px'> """
        """ <center> <h1> I am Sonia 👱🏾‍♀️ your Unique Executive Assistant. </h1></center> """
        """ <center> <h3> Always ready to help you, talk to me. 😊 </h3></center> """
    )
with gr.Tab("Talk to Sonia"): | |
with gr.Group(): | |
us_input = gr.Audio(label="Your Voice Chat", type="filepath", interactive=True, sources="microphone", waveform_options=gr.WaveformOptions(show_recording_waveform=False), container=True) | |
us_text = gr.TextArea(label="Sonia's Text Response", interactive=False, show_copy_button=True, value="", container=True) | |
us_output = gr.Audio(label="Sonia's Response", type="filepath", interactive=False, autoplay=True, elem_classes="audio", waveform_options=gr.WaveformOptions(show_recording_waveform=False), container=True) | |
gr.Interface(fn=respond, inputs=us_input, outputs=[us_text, us_output], live=False) | |
with gr.Tab("Write to Sonia"): | |
with gr.Group(): | |
user_input = gr.TextArea(label="Your Question", show_copy_button=True, value="What are the key considerations for implementing an expansion plan that would affect a large number of employees of a global biomedical company, My position is logistics global Manager professional in inventory management and supply chain within a biotech industry, particularly in cell therapy. The key responsibilities include managing end-to-end logistics and lab implementation over a dispersed geographical area. generate new programs, develop MRP processes, collaborate with various departments, and ensure compliance with GMP standards. I have several years of practical experience, strong analytical skills, and the ability to work collaboratively in a dynamic environment. Bonus qualifications include experience with cold chain logistics and autologous cell therapy.") | |
output_text = gr.TextArea(label="Sonia's Text Response", interactive=False, show_copy_button=True, value="", container=True) | |
output_audio = gr.Audio(label="Sonia's Audio Response", type="filepath", interactive=False, autoplay=True, elem_classes="audio", waveform_options=gr.WaveformOptions(show_recording_waveform=False), container=True) | |
gr.Interface(fn=generate1, inputs=user_input, outputs=[output_text, output_audio], live=False) | |
if __name__ == "__main__":
    demo.queue(max_size=200, api_open=False).launch(show_api=False)