# NOTE(review): removed web-scrape artifacts that preceded the code
# (a "File size" banner, repeated git commit hashes, and a pasted
# line-number gutter) — they were not part of the program and made
# the file invalid Python.
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import black

# Model configuration.
model_name = "microsoft/CodeGPT-small-py"


@st.cache_resource
def _load_generator(name):
    """Load the tokenizer, model, and generation pipeline exactly once.

    Streamlit re-executes the entire script on every widget interaction;
    without ``st.cache_resource`` the model would be re-loaded (and possibly
    re-downloaded) on every rerun, making the app unusably slow.
    """
    tok = AutoTokenizer.from_pretrained(name)
    mdl = AutoModelForCausalLM.from_pretrained(name)
    gen = pipeline(
        "text-generation",
        model=mdl,
        tokenizer=tok,
        # do_sample=True is required: without it the pipeline greedy-decodes
        # and silently ignores temperature/top_p.
        do_sample=True,
        temperature=0.5,
        top_p=0.9,
        max_length=150,
    )
    return tok, mdl, gen


# Keep the original module-level names so the rest of the file still works.
tokenizer, model, generator = _load_generator(model_name)

def generate_code_with_feedback(prompt):
    """Generate code for *prompt* and return it after a formatting self-check."""
    outputs = generator(prompt, num_return_sequences=1)
    raw_text = outputs[0]['generated_text']
    # Self-check pass: run the generated text through the code formatter.
    return format_code(raw_text)

def format_code(code):
    """Format *code* with black, returning it unchanged if it is not parseable.

    Model output is frequently not valid Python; ``black.format_str`` raises
    ``black.InvalidInput`` in that case, which previously crashed the app.
    Falling back to the raw text keeps the UI best-effort.
    """
    try:
        return black.format_str(code, mode=black.Mode())
    except black.InvalidInput:
        # Best-effort: show the raw generation rather than erroring out.
        return code

# --- Streamlit UI ---
st.title("Smart Code Generation and Fixing")
st.write("Enter a prompt to generate or fix code:")

option = st.radio("Select Action", ("Generate Code", "Fix Code"))
if option == "Generate Code":
    prompt = st.text_area("Prompt", "Write a Python function that reverses a string:")
else:
    prompt = st.text_area("Prompt", "Fix the following buggy Python code:\n\ndef reverse_string(s):\n    return s[::-1]")

# Bug fix: the button was hard-coded to "Generate Code" even when the user
# selected "Fix Code" — label it with the chosen action instead.
if st.button(option):
    if prompt:
        generated_code = generate_code_with_feedback(prompt)
        st.subheader("Generated Code")
        # st.code preserves whitespace and adds syntax highlighting;
        # st.write would interpret the output as Markdown and mangle it.
        st.code(generated_code, language="python")
    else:
        st.warning("Please enter a prompt.")