import gradio as gr
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the tokenizer and model once at startup.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

@spaces.GPU
def text_generation(input_text: str, seed: float) -> str:
    # Use the GPU when one is available; fall back to CPU otherwise.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model.to(device)
    # Seed the sampler for reproducible output; torch.manual_seed expects an int
    # (max value: 18446744073709551615), while gr.Number passes a float.
    torch.manual_seed(int(seed))
    input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(device)
    # GPT-2 has no dedicated pad token, so reuse the EOS token for padding.
    outputs = model.generate(input_ids, do_sample=True, max_length=100,
                             pad_token_id=tokenizer.eos_token_id)
    # batch_decode returns a list of strings; the Textbox output expects one string.
    return tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]

title = "palmer demo"
description = "Text completion app by appvoid"

gr.Interface(
    text_generation,
    inputs=[
        gr.Textbox(lines=2, label="Enter input text"),
        gr.Number(value=10, label="Enter seed number"),
    ],
    outputs=gr.Textbox(label="Text Generated"),
    title=title,
    description=description,
).launch()