miaoyibo commited on
Commit
4487773
·
1 Parent(s): 3425837
Files changed (4) hide show
  1. README.md +1 -18
  2. app.py +12 -23
  3. pyproject.toml +2 -2
  4. serve_vllm.sh +0 -4
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- title: Chat with Kimi-VL-A3B-Thinking
3
  emoji: 🤔
4
  colorFrom: green
5
  colorTo: blue
@@ -9,20 +9,3 @@ app_file: app.py
9
  pinned: false
10
  ---
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
13
-
14
-
15
- ## Citation
16
-
17
- ```
18
- @misc{kimiteam2025kimivltechnicalreport,
19
- title={{Kimi-VL} Technical Report},
20
- author={Kimi Team and Angang Du and Bohong Yin and Bowei Xing and Bowen Qu and Bowen Wang and Cheng Chen and Chenlin Zhang and Chenzhuang Du and Chu Wei and Congcong Wang and Dehao Zhang and Dikang Du and Dongliang Wang and Enming Yuan and Enzhe Lu and Fang Li and Flood Sung and Guangda Wei and Guokun Lai and Han Zhu and Hao Ding and Hao Hu and Hao Yang and Hao Zhang and Haoning Wu and Haotian Yao and Haoyu Lu and Heng Wang and Hongcheng Gao and Huabin Zheng and Jiaming Li and Jianlin Su and Jianzhou Wang and Jiaqi Deng and Jiezhong Qiu and Jin Xie and Jinhong Wang and Jingyuan Liu and Junjie Yan and Kun Ouyang and Liang Chen and Lin Sui and Longhui Yu and Mengfan Dong and Mengnan Dong and Nuo Xu and Pengyu Cheng and Qizheng Gu and Runjie Zhou and Shaowei Liu and Sihan Cao and Tao Yu and Tianhui Song and Tongtong Bai and Wei Song and Weiran He and Weixiao Huang and Weixin Xu and Xiaokun Yuan and Xingcheng Yao and Xingzhe Wu and Xinxing Zu and Xinyu Zhou and Xinyuan Wang and Y. Charles and Yan Zhong and Yang Li and Yangyang Hu and Yanru Chen and Yejie Wang and Yibo Liu and Yibo Miao and Yidao Qin and Yimin Chen and Yiping Bao and Yiqin Wang and Yongsheng Kang and Yuanxin Liu and Yulun Du and Yuxin Wu and Yuzhi Wang and Yuzi Yan and Zaida Zhou and Zhaowei Li and Zhejun Jiang and Zheng Zhang and Zhilin Yang and Zhiqi Huang and Zihao Huang and Zijia Zhao and Ziwei Chen},
21
- year={2025},
22
- eprint={2504.07491},
23
- archivePrefix={arXiv},
24
- primaryClass={cs.CV},
25
- url={https://arxiv.org/abs/2504.07491},
26
- }
27
- ```
28
-
 
1
  ---
2
+ title: Chat with Kimi-Dev-72B
3
  emoji: 🤔
4
  colorFrom: green
5
  colorTo: blue
 
9
  pinned: false
10
  ---
11
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
app.py CHANGED
@@ -11,7 +11,6 @@ import pdb
11
 
12
  import openai
13
 
14
- import threading
15
 
16
  from kimi_dev.serve.frontend import reload_javascript
17
  from kimi_dev.serve.utils import (
@@ -102,16 +101,16 @@ def predict(
102
  prompt = text
103
  repo_name = url.split("/")[-1]
104
  print(url)
105
- # print(commit_hash)
106
 
107
  repo_path = './local_path/'+repo_name # Local clone path
108
 
109
  clone_github_repo(url, repo_path, commit_hash)
110
  print("repo cloned")
111
  structure = build_repo_structure(repo_path)
112
- # print("type(structure)",type(structure))
113
  string_struture = show_project_structure(structure)
114
- # print("string_struturem,",string_struture)
115
 
116
  loc_prompt = get_loc_prompt(prompt,string_struture)
117
 
@@ -121,7 +120,7 @@ def predict(
121
  ]
122
 
123
  response = client.chat.completions.create(
124
- model="kimi-dev", # 和vLLM启动时的一致
125
  messages=messages,
126
  stream=True,
127
  temperature=temperature,
@@ -139,17 +138,17 @@ def predict(
139
 
140
  raw_answer=post_process(response)
141
  model_found_files = raw_answer.strip().split("\n")
142
- # print(response)
143
 
144
  highlight_response = highlight_thinking(response)
145
  yield [[prompt,highlight_response]], [["null test","null test2"]], "Generate: Success"
146
 
147
- # reading file content
148
  contents = ""
149
  for file_path in model_found_files:
150
  file_name = file_path.replace("```","")
151
  print(file_name)
152
- # pdb.set_trace()
153
  to_open_path = repo_path + "/" + file_name
154
 
155
  with open(to_open_path, "r", encoding="utf-8") as f:
@@ -171,7 +170,7 @@ def predict(
171
 
172
 
173
  response = client.chat.completions.create(
174
- model="kimi-dev", # 和vLLM启动时的一致
175
  messages=messages,
176
  stream=True,
177
  temperature=temperature,
@@ -207,9 +206,7 @@ def retry(
207
  yield (chatbot, history, "Empty context")
208
  return
209
 
210
- # chatbot.pop()
211
- # history.pop()
212
- # text = history.pop()[-1]
213
  if type(text) is tuple:
214
  text, _ = text
215
 
@@ -254,18 +251,17 @@ def build_demo(args: argparse.Namespace) -> gr.Blocks:
254
  text_box = gr.Textbox(label="Issue Description", placeholder="Enter issue description", container=False)
255
  with gr.Column(min_width=70):
256
  submit_btn = gr.Button("Send")
257
- # with gr.Column(min_width=70):
258
- # cancel_btn = gr.Button("Stop")
259
  with gr.Row():
260
  empty_btn = gr.Button("🧹 New Conversation")
261
  retry_btn = gr.Button("🔄 Regenerate")
262
- del_last_btn = gr.Button("🗑️ Remove Last Turn")
263
  def respond(message):
264
  return f"Url and commit hash submitted!"
265
  with gr.Column():
266
  url_box = gr.Textbox(label="Please input a Github url here",placeholder="Input your url", lines=1)
267
  commit_hash_box = gr.Textbox(label="Please input a commit hash here",placeholder="Input your commit hash", lines=1)
268
- # url_submit_btn = gr.Button("Submit")
269
  url_submit_btn = gr.Button("Submit")
270
  output = gr.Textbox(label="Submitted url and commit")
271
  url_submit_btn.click(fn=respond, inputs=upload_url, outputs=output)
@@ -327,11 +323,6 @@ def main(args: argparse.Namespace):
327
  reload_javascript()
328
 
329
  favicon_path = os.path.join("kimi_dev/serve/assets/favicon.ico")
330
- # demo.queue().launch(
331
- # favicon_path=favicon_path,
332
- # server_name=args.ip,
333
- # server_port=args.port,
334
- # )
335
  demo.queue().launch(
336
  favicon_path=favicon_path,
337
  server_name=args.ip,
@@ -340,11 +331,9 @@ def main(args: argparse.Namespace):
340
  )
341
 
342
  if __name__ == "__main__":
343
- print("start serving vllm")
344
  script_path = os.path.join(os.path.dirname(__file__), "serve_vllm.sh")
345
  subprocess.Popen(["bash", script_path])
346
  time.sleep(500)
347
- print("finiashed loading vllm")
348
 
349
  args = parse_args()
350
  print(args)
 
11
 
12
  import openai
13
 
 
14
 
15
  from kimi_dev.serve.frontend import reload_javascript
16
  from kimi_dev.serve.utils import (
 
101
  prompt = text
102
  repo_name = url.split("/")[-1]
103
  print(url)
104
+ print(commit_hash)
105
 
106
  repo_path = './local_path/'+repo_name # Local clone path
107
 
108
  clone_github_repo(url, repo_path, commit_hash)
109
  print("repo cloned")
110
  structure = build_repo_structure(repo_path)
111
+
112
  string_struture = show_project_structure(structure)
113
+
114
 
115
  loc_prompt = get_loc_prompt(prompt,string_struture)
116
 
 
120
  ]
121
 
122
  response = client.chat.completions.create(
123
+ model="kimi-dev",
124
  messages=messages,
125
  stream=True,
126
  temperature=temperature,
 
138
 
139
  raw_answer=post_process(response)
140
  model_found_files = raw_answer.strip().split("\n")
141
+
142
 
143
  highlight_response = highlight_thinking(response)
144
  yield [[prompt,highlight_response]], [["null test","null test2"]], "Generate: Success"
145
 
146
+
147
  contents = ""
148
  for file_path in model_found_files:
149
  file_name = file_path.replace("```","")
150
  print(file_name)
151
+
152
  to_open_path = repo_path + "/" + file_name
153
 
154
  with open(to_open_path, "r", encoding="utf-8") as f:
 
170
 
171
 
172
  response = client.chat.completions.create(
173
+ model="kimi-dev",
174
  messages=messages,
175
  stream=True,
176
  temperature=temperature,
 
206
  yield (chatbot, history, "Empty context")
207
  return
208
 
209
+
 
 
210
  if type(text) is tuple:
211
  text, _ = text
212
 
 
251
  text_box = gr.Textbox(label="Issue Description", placeholder="Enter issue description", container=False)
252
  with gr.Column(min_width=70):
253
  submit_btn = gr.Button("Send")
254
+
 
255
  with gr.Row():
256
  empty_btn = gr.Button("🧹 New Conversation")
257
  retry_btn = gr.Button("🔄 Regenerate")
258
+
259
  def respond(message):
260
  return f"Url and commit hash submitted!"
261
  with gr.Column():
262
  url_box = gr.Textbox(label="Please input a Github url here",placeholder="Input your url", lines=1)
263
  commit_hash_box = gr.Textbox(label="Please input a commit hash here",placeholder="Input your commit hash", lines=1)
264
+
265
  url_submit_btn = gr.Button("Submit")
266
  output = gr.Textbox(label="Submitted url and commit")
267
  url_submit_btn.click(fn=respond, inputs=upload_url, outputs=output)
 
323
  reload_javascript()
324
 
325
  favicon_path = os.path.join("kimi_dev/serve/assets/favicon.ico")
 
 
 
 
 
326
  demo.queue().launch(
327
  favicon_path=favicon_path,
328
  server_name=args.ip,
 
331
  )
332
 
333
  if __name__ == "__main__":
 
334
  script_path = os.path.join(os.path.dirname(__file__), "serve_vllm.sh")
335
  subprocess.Popen(["bash", script_path])
336
  time.sleep(500)
 
337
 
338
  args = parse_args()
339
  print(args)
pyproject.toml CHANGED
@@ -1,7 +1,7 @@
1
  [project]
2
- name = "kimi_vl"
3
  version = "1.0.0"
4
- description = "Kimi-VL"
5
  license = {file = "LICENSE-CODE"}
6
  readme = "README.md"
7
  requires-python = ">=3.8"
 
1
  [project]
2
+ name = "kimi_dev"
3
  version = "1.0.0"
4
+ description = "Kimi-Dev"
5
  license = {file = "LICENSE-CODE"}
6
  readme = "README.md"
7
  requires-python = ">=3.8"
serve_vllm.sh CHANGED
@@ -10,7 +10,3 @@ python -m vllm.entrypoints.openai.api_server \
10
  --served-model-name kimi-dev \
11
  --port 8080
12
 
13
- SERVICE_URL="http://localhost:8080/v1/models"
14
- TIMEOUT=500 # 最大等待秒数
15
- INTERVAL=5 # 检测间隔秒数
16
- ELAPSED=0
 
10
  --served-model-name kimi-dev \
11
  --port 8080
12