Spaces:
Build error
import streamlit as st
import gradio as gr
from transformers import pipeline, AutoModelForSeq2SeqLM, AutoTokenizer
import subprocess
import os

# Initialize Hugging Face pipelines
text_generator = pipeline("text-generation", model="gpt2")
code_generator = pipeline("text2text-generation", model="t5-base")

# Streamlit App
st.title("AI Dev Tool Kit")

# Sidebar for Navigation
st.sidebar.title("Navigation")
app_mode = st.sidebar.selectbox("Choose the app mode", ["Explorer", "In-Chat Terminal", "Tool Box"])

if app_mode == "Explorer":
    st.header("Explorer")
    st.write("Explore files and projects here.")
    # Implement your explorer functionality here
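    # Hypothetical sketch of a minimal Explorer (not part of the original code):
    # list files in the Space's working directory and preview the selected one.
    files = sorted(f for f in os.listdir(".") if os.path.isfile(f))
    selected_file = st.selectbox("Select a file to preview", files)
    if selected_file:
        with open(selected_file, "r", errors="ignore") as fh:
            st.code(fh.read())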
elif app_mode == "In-Chat Terminal":
    st.header("In-Chat Terminal")

    def run_terminal_command(command):
        try:
            result = subprocess.run(command, shell=True, capture_output=True, text=True)
            return result.stdout if result.returncode == 0 else result.stderr
        except Exception as e:
            return str(e)

    def terminal_interface(command):
        response = run_terminal_command(command)
        return response

    def nlp_code_interpreter(text):
        response = code_generator(text, max_length=150)
        code = response[0]['generated_text']
        return code, run_terminal_command(code)

    with gr.Blocks() as iface:
        terminal_input = gr.Textbox(label="Enter Command or Code")
        code_output = gr.Textbox(label="Generated Code", lines=10)
        terminal_output = gr.Textbox(label="Terminal Output", lines=10)
        terminal_button = gr.Button("Run")
        terminal_button.click(
            nlp_code_interpreter,
            inputs=terminal_input,
            # nlp_code_interpreter returns two values, so map them to two
            # separate output components instead of the same textbox twice.
            outputs=[code_output, terminal_output],
        )

    # launch() starts the Gradio server as its own app and blocks the
    # Streamlit script from continuing while it runs.
    iface.launch()
    st.write("Use the terminal to execute commands or interpret natural language into code.")
elif app_mode == "Tool Box":
    st.header("Tool Box")
    st.write("Access various AI development tools here.")
    # Implement your tool box functionality here
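    # Hypothetical sketch of one Tool Box entry (not part of the original code):
    # expose the text_generator pipeline defined above as a quick GPT-2 demo.
    prompt = st.text_input("Prompt for the GPT-2 text generator")
    if prompt:
        generated = text_generator(prompt, max_length=50, num_return_sequences=1)
        st.write(generated[0]["generated_text"])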
# Deploy to Hugging Face Spaces
def deploy_to_huggingface(app_name):
    # The generated app.py must be self-contained: it needs its own imports and
    # its own code_generator pipeline, otherwise the deployed Space fails.
    code = """
import subprocess

import gradio as gr
from transformers import pipeline

code_generator = pipeline("text2text-generation", model="t5-base")

def run_terminal_command(command):
    try:
        result = subprocess.run(command, shell=True, capture_output=True, text=True)
        return result.stdout if result.returncode == 0 else result.stderr
    except Exception as e:
        return str(e)

def nlp_code_interpreter(text):
    response = code_generator(text, max_length=150)
    code = response[0]['generated_text']
    return code, run_terminal_command(code)

with gr.Blocks() as iface:
    terminal_input = gr.Textbox(label="Enter Command or Code")
    code_output = gr.Textbox(label="Generated Code", lines=10)
    terminal_output = gr.Textbox(label="Terminal Output", lines=10)
    terminal_button = gr.Button("Run")
    terminal_button.click(
        nlp_code_interpreter,
        inputs=terminal_input,
        outputs=[code_output, terminal_output],
    )

iface.launch()
"""
    try:
        # Create the Space, then build a local repo containing app.py plus a
        # requirements.txt (Spaces install extra Python packages from it).
        subprocess.run(
            ["huggingface-cli", "repo", "create", "--type", "space", "--space_sdk", "gradio", app_name],
            check=True,
        )
        os.makedirs(app_name, exist_ok=True)
        with open(os.path.join(app_name, "app.py"), "w") as f:
            f.write(code)
        with open(os.path.join(app_name, "requirements.txt"), "w") as f:
            f.write("transformers\ntorch\n")
        subprocess.run(["git", "init"], cwd=app_name, check=True)
        subprocess.run(["git", "add", "."], cwd=app_name, check=True)
        subprocess.run(["git", "commit", "-m", "Initial commit"], cwd=app_name, check=True)
        # NOTE: the full Space id is "<username-or-org>/<app_name>"; adjust the
        # URL below if the Space was not created under the bare app_name.
        subprocess.run(
            ["git", "push", "https://huggingface.co/spaces/" + app_name, "HEAD:main"],
            cwd=app_name,
            check=True,
        )
        return f"Successfully deployed to Hugging Face Spaces: https://huggingface.co/spaces/{app_name}"
    except Exception as e:
        return f"Error deploying to Hugging Face Spaces: {e}"
# Example usage
if st.button("Deploy to Hugging Face"):
    app_name = "ai-dev-toolkit"
    deploy_status = deploy_to_huggingface(app_name)
    st.write(deploy_status)
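Both `huggingface-cli repo create` and the `git push` step require an authenticated Hugging Face account. A minimal sketch for checking this before deploying, using the `huggingface_hub` client (not part of the original app; assumes a user access token is configured):

from huggingface_hub import whoami

try:
    # whoami() raises if no token is configured (e.g. via `huggingface-cli login`)
    print("Logged in as", whoami()["name"])
except Exception:
    print("Run `huggingface-cli login` or set HF_TOKEN before deploying.")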