import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# prompt = "In a shocking finding, scientists discovered a herd of unicorns living in a remote, " \
#          "previously unexplored valley, in the Andes Mountains. Even more surprising to the " \
#          "researchers was the fact that the unicorns spoke perfect English."

# Load the tokenizer and model once at startup rather than inside predict();
# GPT-J-6B is large, so reloading it on every request would be prohibitively slow.
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")

def predict(text):
    # Tokenize the prompt, sample a continuation, and decode it back to text.
    input_ids = tokenizer(text, return_tensors="pt").input_ids
    gen_tokens = model.generate(input_ids, do_sample=True, temperature=0.9, max_length=100)
    gen_text = tokenizer.batch_decode(gen_tokens)[0]
    return gen_text

iface = gr.Interface(fn=predict, inputs="text", outputs="text")
iface.launch()
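# Note (assumed sizing, not measured in this Space): GPT-J-6B in full fp32 precision needs
# roughly 24 GB of RAM for the weights alone. If memory is tight, a half-precision load is a
# common workaround, sketched below; torch_dtype and low_cpu_mem_usage are standard
# transformers from_pretrained arguments.
#
#   import torch
#   model = AutoModelForCausalLM.from_pretrained(
#       "EleutherAI/gpt-j-6B", torch_dtype=torch.float16, low_cpu_mem_usage=True
#   )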
# The block below also works; it is kept commented out while the version above is being tested.
# import gradio as gr
#
# title = "GPT-J-6B"
# examples = [
#     ["The tower is 324 metres (1,063 ft) tall,"],
#     ["The Moon's orbit around Earth has"],
#     ["The smooth Borealis basin in the Northern Hemisphere covers 40%"],
# ]
#
# gr.Interface.load("huggingface/EleutherAI/gpt-j-6B",
#                   inputs=gr.inputs.Textbox(lines=10, label="Input Text"),
#                   title=title, examples=examples).launch()
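# Note (assumed, newer Gradio only): gr.Interface.load and the gr.inputs.* classes were
# deprecated in later Gradio releases. A roughly equivalent one-liner there would be:
#
#   gr.load("models/EleutherAI/gpt-j-6B").launch()
#
# which builds the demo directly from the hosted model via the Hugging Face Inference API.
# This is a sketch for reference, not part of the Space's current code.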