un-index committed on
Commit 75197d8 · 1 Parent(s): a12ede0
Files changed (1)
  1. app.py +14 -6
app.py CHANGED
@@ -1,15 +1,23 @@
+
+ import gradio as gr
  from transformers import AutoModelForCausalLM, AutoTokenizer
+
  model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
  tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")

- prompt = "In a shocking finding, scientists discovered a herd of unicorns living in a remote, " \
- "previously unexplored valley, in the Andes Mountains. Even more surprising to the " \
- "researchers was the fact that the unicorns spoke perfect English."
+ # prompt = "In a shocking finding, scientists discovered a herd of unicorns living in a remote, " \
+ # "previously unexplored valley, in the Andes Mountains. Even more surprising to the " \
+ # "researchers was the fact that the unicorns spoke perfect English."

- input_ids = tokenizer(prompt, return_tensors="pt").input_ids

- gen_tokens = model.generate(input_ids, do_sample=True, temperature=0.9, max_length=100)
- gen_text = tokenizer.batch_decode(gen_tokens)[0]
+ def predict(text):
+     input_ids = tokenizer(text, return_tensors="pt").input_ids
+     gen_tokens = model.generate(input_ids, do_sample=True, temperature=0.9, max_length=100)
+     gen_text = tokenizer.batch_decode(gen_tokens)[0]
+     return gen_text
+
+ iface = gr.Interface(fn=predict, inputs="text", outputs="text")
+ iface.launch()

  # all below works but testing
  # import gradio as gr
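
Note: the generation path inside the new predict() can be sanity-checked locally without launching the Gradio app. The following is a minimal sketch, assuming distilgpt2 as a small stand-in checkpoint (an assumption made only for a quick test; the Space itself loads EleutherAI/gpt-j-6B, which needs tens of GB of memory):

# Minimal sketch: exercise the same tokenize -> generate -> decode steps as predict(),
# but with distilgpt2 as a small stand-in model (assumption for local testing only).
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("distilgpt2")
tokenizer = AutoTokenizer.from_pretrained("distilgpt2")

def predict(text):
    input_ids = tokenizer(text, return_tensors="pt").input_ids
    gen_tokens = model.generate(input_ids, do_sample=True, temperature=0.9, max_length=100)
    return tokenizer.batch_decode(gen_tokens)[0]

print(predict("In a shocking finding, scientists discovered"))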