Spaces:
Sleeping
Sleeping
File size: 9,317 Bytes
6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 6a2d777 87f911f 012fe83 6a2d777 1a40acc 6a2d777 ed170ea 6a2d777 87f911f 6a2d777 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 |
# App entry module: Gradio UI for chatting with characters ("heroes")
# extracted from novels by an LLM.
import os
import sys
# Add src directory to Python path for Hugging Face Spaces compatibility
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.join(PROJECT_ROOT, "src")
sys.path.insert(0, SRC_DIR)
import json
import gradio as gr
# NOTE(review): `add` is never used anywhere in this file — looks like a
# stray auto-import; confirm before removing.
from numpy import add
from novel_heroes.i18n import i18n
from novel_heroes.llm_call import LLMCall
from novel_heroes.mcp_server import get_book_content, get_book_list
# NOTE: In Gradio, global variables are SHARED across all users.
# https://www.gradio.app/guides/state-in-blocks#global-state
# Maximum length for book content in characters.
# If the book is too long, it will be truncated.
# NOTE: Gemma-3-4B has a context length of 128k in tokens
BOOK_CONTENT_MAX_LENGTH = 300 * 1000
def load_book_content(book_name, progress=gr.Progress()):
    """Load a book's content and extract its heroes via the LLM.

    Args:
        book_name: Name of the selected book ("" / None if nothing selected).
        progress: Gradio progress tracker (injected by Gradio at call time).

    Returns:
        A 5-tuple matching the ``load_button.click()`` outputs:
        (book_name, book_content, heroes, hero_dropdown_update, status_message)
    """
    if not book_name:
        # BUG FIX: the original returned only 2 values here, but the click()
        # handler binds 5 outputs — always return a full 5-tuple.
        return (
            "",
            "",
            [],
            gr.Dropdown(choices=[], value=""),
            "Please select a book first.",
        )
    progress(0.1, desc="Loading book content...")
    # Content is truncated server-side to fit the model's context window.
    book_content = get_book_content(book_name, BOOK_CONTENT_MAX_LENGTH)
    progress(0.5, desc="Extracting heroes from the book...")
    # Extract heroes using LLM
    try:
        call_llm = LLMCall()
        heroes = call_llm.listup_heroes(book_content)
        progress(1.0, desc="Complete!")
        if heroes:
            return (
                book_name,
                book_content,
                heroes,
                gr.Dropdown(choices=heroes, value=heroes[0]),
                f"Successfully loaded '{book_name}' and found {len(heroes)} heroes.",
            )
        return (
            book_name,
            book_content,
            heroes,
            gr.Dropdown(choices=[], value=""),
            f"Loaded '{book_name}' but no heroes were found.",
        )
    except Exception as e:
        # BUG FIX: `heroes` may be unbound if LLMCall()/listup_heroes raised
        # before assignment; returning it caused a NameError that masked the
        # real failure. Return an empty list instead.
        return (
            book_name,
            book_content,
            [],
            gr.Dropdown(choices=[], value=""),
            f"Error extracting heroes: {str(e)}",
        )
def custom_respond(
    message, history, book_name, book_content, selected_hero, lang_prompt
):
    """Stream a chat response while role-playing `selected_hero`.

    Builds a role-play system prompt containing the full book text and
    yields partial responses from the LLM so Gradio can stream them.

    Args:
        message: Latest user message.
        history: Chat history in Gradio "messages" format.
        book_name: Currently selected book name.
        book_content: Cached book text (refetched if empty).
        selected_hero: Character the LLM should impersonate.
        lang_prompt: Extra instruction controlling the reply language.

    Yields:
        Incremental response strings.
    """
    # Guard clause: both a book and a hero must be chosen before chatting.
    if not book_name or not selected_hero:
        yield "Please select a book and hero first."
        return
    if not book_content:
        # The hidden textbox may be empty (e.g. after a reload) — refetch.
        book_content = get_book_content(book_name, BOOK_CONTENT_MAX_LENGTH)
    # Generate system prompt
    # fmt: off
    system_prompt = (
        f"You are {selected_hero}, a character from the book '{book_name}'. "
        "Behave and respond according to the personality and attitude of this character. "
        "If the character is unfriendly, respond unfriendly; if the character is kind, respond kindly. "
        # BUG FIX: added a newline — the original adjacent literals fused
        # into "...a character in a book.Below is the book content:".
        "You don't know that you are a character in a book.\n"
        "Below is the book content:\n\n"
        "====================\n"
        f"{book_content}\n"
        "====================\n"
        f"{lang_prompt}"
    )
    # fmt: on
    # Call the original respond method with custom system prompt
    call_llm = LLMCall()
    for response in call_llm.respond(message, history, system_prompt=system_prompt):
        yield response
def gradio_ui():
    """Build and return the Gradio Blocks app.

    Per-user selections are kept in gr.BrowserState (persisted in the
    user's browser) because module-level globals would be shared across
    all users of the Space.

    Returns:
        The constructed gr.Blocks demo (not yet launched).
    """
    # Per-browser persisted state — survives page reloads, not shared.
    book_name = gr.BrowserState("")
    heroes = gr.BrowserState([])
    selected_hero = gr.BrowserState("")
    # get_book_list() returns a JSON-encoded list of book names.
    book_choices = json.loads(get_book_list())
    with gr.Blocks() as demo:
        # Informational tab shown while the backing Modal server is offline.
        with gr.Tab("CLOSED"):
            gr.Markdown(
                # fmt: off
                "⚠️**Note:** <br>"
                "The Modal server used in this project has been stopped. <br>"
                "If you would like to run it yourself, please clone this project via git and follow the instructions in the README.md to set up and run it."
                # fmt: on
            )
        with gr.Tab("Chat"):
            # Title
            with gr.Row():
                gr.Markdown("# Novel Heroes")
            # Status
            with gr.Row():
                status_markdown = gr.Markdown(label="Status")
                status_markdown.value = (
                    # fmt: off
                    "Welcome! "
                    "Please select a book and push the button to load it.<br>"
                    "⚠️**Note:** Cold start may take up to 3 minutes. Please wait patiently for the initial response."
                    # fmt: on
                )
            # Hidden textbox for book content
            book_content = gr.Textbox(visible=False)
            with gr.Row():
                with gr.Column(scale=1):
                    # Book selection
                    with gr.Row():
                        book_dropdown = gr.Dropdown(
                            label="📚 Select a Book",
                            choices=book_choices,
                            interactive=True,
                        )
                    with gr.Row():
                        load_button = gr.Button(
                            "Load Book & Extract Heroes", variant="primary"
                        )
                    # Hero selection
                    with gr.Row():
                        hero_dropdown = gr.Dropdown(
                            label="🦸 Select a Hero", choices=[], interactive=True
                        )
                    # load_book_content returns a 5-tuple matching these outputs.
                    load_button.click(
                        fn=load_book_content,
                        inputs=[book_dropdown],
                        outputs=[
                            book_name,
                            book_content,
                            heroes,
                            hero_dropdown,
                            status_markdown,
                        ],
                    )
                    def update_hero_selection(
                        hero,
                    ):
                        """Update selected hero."""
                        return hero
                    # Mirror the dropdown choice into persisted browser state.
                    hero_dropdown.change(
                        fn=update_hero_selection,
                        inputs=[hero_dropdown],
                        outputs=[selected_hero],
                    )
                    with gr.Row():
                        # Editable so users can request replies in any language.
                        lang_textbox = gr.Textbox(
                            label="🌐 Language Prompt",
                            value=i18n("lang_prompt"),
                        )
                    with gr.Row():
                        gr.Markdown(
                            "💡 **Tip:** You can change the language prompt to make the chatbot reply in your favorite language!<br>"
                            "For example, try 'Please respond in Japanese.' or '日本語で答えて' 😊<br>"
                            "<br>"
                            "📌 **Note:** Once you start chatting, you can't change the book or hero. <br>"
                            "If you want to pick a different one, just hit the reset button and start fresh! 🔄<br>"
                        )
                with gr.Column(scale=2):
                    # Chat interface
                    # additional_inputs are appended to custom_respond's
                    # (message, history) arguments, in this order.
                    chat_interface = gr.ChatInterface(
                        fn=custom_respond,
                        additional_inputs=[
                            book_dropdown,
                            book_content,
                            hero_dropdown,
                            lang_textbox,
                        ],
                        type="messages",
                        autofocus=False
                    )
                    reset_button = gr.Button("Reset", variant="secondary")
                    # Reset clears selections, the language prompt, the status
                    # line, and the chat history (chatbot_value <- []).
                    reset_button.click(
                        lambda: (
                            gr.update(value=""),
                            gr.update(value=""),
                            gr.update(choices=[], value=None),
                            gr.update(value=i18n("lang_prompt")),
                            "Status reset.",
                            [],
                        ),
                        outputs=[
                            book_dropdown,
                            book_content,
                            hero_dropdown,
                            lang_textbox,
                            status_markdown,
                            chat_interface.chatbot_value,
                        ],
                    )
        # Utility tab: exposes get_book_list for manual inspection / MCP.
        with gr.Tab("List Books"):
            gr.Markdown("📚 Get the list of available books.")
            btn = gr.Button("Get")
            output_text = gr.Textbox(label="Books")
            btn.click(get_book_list, None, output_text)
        # Utility tab: fetch raw book text by name.
        with gr.Tab("Book Content"):
            gr.Markdown("📗 Get the content of a book by its name.")
            # NOTE(review): these rebind `book_name`/`book_content` used by
            # the Chat tab above. The event handlers already captured the
            # earlier components, so behavior is unaffected, but the
            # shadowing is confusing — consider distinct names.
            book_name = gr.Textbox(label="Book Name")
            book_len = gr.Number(label="Max Length", value=1000)
            book_content = gr.Textbox(label="Book Content", lines=20)
            btn = gr.Button("Get")
            btn.click(get_book_content, [book_name, book_len], book_content)
    return demo
if __name__ == "__main__":
    # Build the UI and serve it with the MCP server enabled so the book
    # functions are also exposed as MCP tools.
    # NOTE: The app must be restarted whenever MCP tools are added/removed.
    app = gradio_ui()
    app.launch(i18n=i18n, mcp_server=True)
|