wuhp committed (verified)
Commit 051fe19 · 1 Parent(s): b1a4014

Update app.py

Files changed (1): app.py (+47 -92)
app.py CHANGED
@@ -12,9 +12,9 @@ from google.genai.types import Tool, GoogleSearch
 # -----------------------------------------------------------------------------
 # Configuration
 # -----------------------------------------------------------------------------
-MODEL_ID: str = "gemini-2.5-flash-preview-04-17"
-WORKSPACE_DIR: str = "workspace"
-SYSTEM_INSTRUCTION: str = (
+MODEL_ID = "gemini-2.5-flash-preview-04-17"
+WORKSPACE_DIR = "workspace"
+SYSTEM_INSTRUCTION = (
     "You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
     "Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
     "and respond with exactly one JSON object with keys:\n"
@@ -24,27 +24,21 @@ SYSTEM_INSTRUCTION: str = (
     "Do not include extra text or markdown."
 )

-# In‑memory session store: maps session IDs -> state dicts
 state_store: Dict[str, Dict[str, Any]] = {}

-# -----------------------------------------------------------------------------
-# Helper functions
-# -----------------------------------------------------------------------------
-
-def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> Dict[str, Any]:
-    """Initialise chat with Gemini + create a local workspace for the new app."""
+def start_app(
+    gemini_key: str,
+    hf_token: str,
+    hf_username: str,
+    repo_name: str
+) -> Dict[str, Any]:
     os.makedirs(WORKSPACE_DIR, exist_ok=True)
-
-    # Gemini client & chat session
     client = genai.Client(api_key=gemini_key)
     config = types.GenerateContentConfig(system_instruction=SYSTEM_INSTRUCTION)
     tools = [Tool(google_search=GoogleSearch())]
     chat = client.chats.create(model=MODEL_ID, config=config, tools=tools)
-
-    # Local project folder (used as upload snapshot source)
     local_path = os.path.join(WORKSPACE_DIR, repo_name)
     os.makedirs(local_path, exist_ok=True)
-
     return {
         "chat": chat,
         "hf_token": hf_token,
@@ -57,34 +51,24 @@ def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str)
         "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."]
     }

-
 def handle_message(user_msg: str, state: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:
-    """Send *user_msg* to Gemini, act on the JSON response, and return assistant reply."""
     chat = state["chat"]
     logs = state.setdefault("logs", [])
-
     logs.append(f"> **User**: {user_msg}")
     resp = chat.send_message(user_msg)
     logs.append("Received response from Gemini.")

-    # ---------------------------------------------------------------------
-    # Parse Gemini JSON answer
-    # ---------------------------------------------------------------------
     try:
         data = json.loads(resp.text)
-        framework: str = data["framework"]
-        files: Dict[str, str] = data.get("files", {})
-        reply_msg: str = data.get("message", "")
+        framework = data["framework"]
+        files = data.get("files", {})
+        reply = data.get("message", "")
     except Exception:
         logs.append("⚠️ Failed to parse assistant JSON.\n" + resp.text)
         return "⚠️ Parsing error. Check logs.", state

-    # ---------------------------------------------------------------------
-    # Create the target Space on first run
-    # ---------------------------------------------------------------------
     if not state["created"]:
         full_repo = f"{state['hf_username']}/{state['repo_name']}"
-        logs.append(f"Creating HF Space **{full_repo}** (template '{framework}') …")
         create_repo(
             repo_id=full_repo,
             token=state["hf_token"],
@@ -98,9 +82,6 @@ def handle_message(user_msg: str, state: Dict[str, Any]) -> Tuple[str, Dict[str,
             "embed_url": f"https://huggingface.co/spaces/{full_repo}",
         })

-    # ---------------------------------------------------------------------
-    # Write / overwrite files
-    # ---------------------------------------------------------------------
     if files:
         logs.append(f"Writing {len(files)} file(s): {list(files)}")
         for relpath, content in files.items():
@@ -109,76 +90,54 @@ def handle_message(user_msg: str, state: Dict[str, Any]) -> Tuple[str, Dict[str,
             with open(dest, "w", encoding="utf‑8") as fp:
                 fp.write(content)

-    # ---------------------------------------------------------------------
-    # Push snapshot
-    # ---------------------------------------------------------------------
-    logs.append("Uploading snapshot to Hugging Face …")
     HfApi(token=state["hf_token"]).upload_folder(
         folder_path=state["local_path"],
         repo_id=state["repo_id"],
         repo_type="space",
     )
     logs.append("Snapshot upload complete.")
+    return reply, state

-    return reply_msg, state
-
-# -----------------------------------------------------------------------------
-# Gradio UI
-# -----------------------------------------------------------------------------
 with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
-    # -------------------------------------------------------------------------
-    # OAuth UI row (always visible)
-    # -------------------------------------------------------------------------
     with gr.Row():
         login_btn = gr.LoginButton(
-            logout_value="Logout ({username})",  # label once signed in
+            logout_value="Logout ({username})",
             variant="huggingface",
             size="lg"
         )
         status_md = gr.Markdown("Not logged in")
-
-    # Activate the login button’s routes (silences the warning)
     login_btn.activate()

-    # Callback to refresh the status text whenever OAuth state changes
     def show_profile(profile: gr.OAuthProfile | None):
         if profile is None:
             return "*Not logged in.*"
         return f"Logged in as **{profile.username}**"
-
     demo.load(fn=show_profile, inputs=None, outputs=status_md)

-    # -------------------------------------------------------------------------
-    # Main app controls (hidden behind OAuth)
-    # -------------------------------------------------------------------------
     with gr.Row():
         with gr.Column(scale=1):
-            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
-            hf_user = gr.Textbox(label="HF Username")
-            repo_name = gr.Textbox(label="New App (repo) name")
-            session_id = gr.Textbox(value="", visible=False)
-            start_btn = gr.Button("Start a new app")
-
+            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
+            hf_user = gr.Textbox(label="HF Username")
+            repo_name = gr.Textbox(label="New App (repo) name")
+            session_id = gr.Textbox(value="", visible=False)
+            start_btn = gr.Button("Start a new app")
         with gr.Column(scale=3):
-            chatbot = gr.Chatbot(type="messages")
-            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
-            preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
-
-    user_msg = gr.Textbox(label="Your message")
-    send_btn = gr.Button("Send", interactive=False)
-
-    # ---------------------------------------------------------------------
-    # Callback: start a new app (requires OAuth token)
-    # ---------------------------------------------------------------------
-    def on_start(g_key: str, h_user: str, r_name: str, oauth_token: gr.OAuthToken | None):
+            chatbot = gr.Chatbot(type="messages", value=[])
+            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
+            preview_iframe= gr.HTML("<p>No deployed app yet.</p>")
+            user_msg = gr.Textbox(label="Your message")
+            send_btn = gr.Button("Send", interactive=False)
+
+    def on_start(
+        gemini_key: str,
+        hf_username: str,
+        repo_name: str,
+        oauth_token: gr.OAuthToken | None
+    ):
         if oauth_token is None:
-            return (
-                gr.Error("Please *Sign in with Hugging Face* first."),
-                "", "", gr.update(interactive=False)
-            )
-
+            return gr.Error("Please *Sign in with Hugging Face* first."), "", "", gr.update(interactive=False)
         new_id = str(uuid.uuid4())
-        state_store[new_id] = start_app(g_key, oauth_token.token, h_user, r_name)
+        state_store[new_id] = start_app(gemini_key, oauth_token.token, hf_username, repo_name)
         logs = "\n".join(state_store[new_id]["logs"])
         return new_id, logs, "<p>Awaiting first instruction…</p>", gr.update(interactive=True)

@@ -188,24 +147,23 @@ with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
         outputs=[session_id, logs_display, preview_iframe, send_btn],
     )

-    # ---------------------------------------------------------------------
-    # Callback: send a chat message
-    # ---------------------------------------------------------------------
-    def on_send(msg: str, chat_history: list[list[str]], sess_id: str):
+    def on_send(
+        msg: str,
+        chat_history: list[dict],
+        sess_id: str
+    ):
         if not sess_id or sess_id not in state_store:
-            err = "Error: No active session. Click *Start a new app* first."
-            return chat_history + [("", err)], sess_id, "", ""
-
+            return [], sess_id, "", ""
         reply, new_state = handle_message(msg, state_store[sess_id])
         state_store[sess_id] = new_state
-
-        chat_history.append((msg, reply))
-        logs = "\n".join(new_state["logs"])
-        embed = (
-            f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
-            if new_state.get("embed_url") else ""
-        )
-        return chat_history, sess_id, logs, embed
+        history = chat_history or []
+        history.append({"role": "user", "content": msg})
+        history.append({"role": "assistant", "content": reply})
+        logs = "\n".join(new_state["logs"])
+        embed = ""
+        if new_state.get("embed_url"):
+            embed = f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
+        return history, sess_id, logs, embed

     send_btn.click(
         on_send,
@@ -218,8 +176,5 @@ with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
         outputs=[chatbot, session_id, logs_display, preview_iframe],
     )

-# -----------------------------------------------------------------------------
-# Launch the Gradio app
-# -----------------------------------------------------------------------------
 if __name__ == "__main__":
     demo.launch()
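
Note: after this change, handle_message still reads only three keys from the model's JSON reply: framework, files, and message (the full key list spelled out in SYSTEM_INSTRUCTION falls outside the hunks shown). A minimal sketch of a reply the new parsing branch would accept; the file names and contents below are illustrative, not taken from this commit:

    # Hypothetical Gemini reply that the json.loads() branch in handle_message accepts.
    # Only "framework", "files", and "message" are consumed by the code in this diff;
    # how "framework" feeds into create_repo(...) is outside the shown hunks.
    example_reply = {
        "framework": "gradio",
        "files": {
            "app.py": "import gradio as gr\n# ...generated app code...",
            "requirements.txt": "gradio\n",
        },
        "message": "Scaffolded a minimal Gradio app.",
    }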
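
Note: the rewritten on_send appends role/content dicts rather than (user, assistant) tuples, which is the history format a gr.Chatbot(type="messages") component expects; the old tuple-style appends appear to be the mismatch this change addresses. Illustrative shape of the history it now returns (values invented for the example):

    # History format returned to gr.Chatbot(type="messages") after one round trip.
    chat_history = [
        {"role": "user", "content": "Build a to-do list app"},
        {"role": "assistant", "content": "Scaffolded a minimal Gradio app."},
    ]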