# Gradio demo: biomedical abbreviation identifier that logs every prediction to a JSONL file.
import gradio as gr
from transformers import pipeline
import json
from datetime import datetime
import os

# Append-only JSONL log of user interactions.
LOG_FILE = "/tmp/log.jsonl"
if not os.path.exists(LOG_FILE):
    # Create an empty log so the download button works before any prediction is made.
    with open(LOG_FILE, "w"):
        pass

# aggregation_strategy="simple" merges sub-word tokens into whole entity spans.
model = pipeline("token-classification", model="benchaffe/Bert-RAdam-Large", aggregation_strategy="simple")

def to_serializable(obj):
    """Recursively convert a pipeline result into JSON-serializable types (str() fallback)."""
    if isinstance(obj, (float, int, str, bool, type(None))):
        return obj
    elif isinstance(obj, dict):
        return {k: to_serializable(v) for k, v in obj.items()}
    elif isinstance(obj, list):
        return [to_serializable(i) for i in obj]
    else:
        return str(obj)

def log_interaction(input_text, prediction):
    """Append one timestamped input/prediction record to the JSONL log."""
    log_entry = {
        "timestamp": datetime.now().isoformat(),
        "input": input_text,
        "prediction": to_serializable(prediction)
    }
    with open(LOG_FILE, "a") as f:
        f.write(json.dumps(log_entry) + "\n")

def predict_and_log(text):
    # Convert up front so both the log entry and the gr.JSON output receive JSON-safe values
    # (raw pipeline scores are numpy floats).
    result = to_serializable(model(text))
    log_interaction(text, result)
    return result

def get_log_file():
    # Hand the log path to gr.File so the browser can download it.
    return LOG_FILE

with gr.Blocks() as demo:
    gr.Markdown("## Biomedical Abbreviation Identifier")
    input_box = gr.Textbox(label="Enter biomedical text")
    output_box = gr.JSON(label="Model Prediction")
    download_output = gr.File(label="Click to download")
    submit_btn = gr.Button("Submit")
    download_btn = gr.Button("Download Log")
    submit_btn.click(fn=predict_and_log, inputs=input_box, outputs=output_box)
    download_btn.click(fn=get_log_file, outputs=download_output)

demo.launch()
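
# A minimal sketch (not part of the app) of how the JSONL log could be read back for
# offline review; the file path and field names match the log_interaction() entries above.
#
#   import json
#   with open("/tmp/log.jsonl") as f:
#       entries = [json.loads(line) for line in f]
#   for e in entries:
#       print(e["timestamp"], e["input"], len(e["prediction"]), "entities")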