File size: 3,845 Bytes
75197d8
3dec2d1
 
1c8febf
73d4a73
0cc2838
5e3b1a8
75197d8
bba0271
1588190
5e3b1a8
 
 
1588190
7c1c1eb
 
 
 
1c8febf
7c1c1eb
 
 
 
 
1588190
1c8febf
68394f9
1c8febf
9ef03c3
5f283d9
68394f9
 
759a131
 
 
d21d696
68394f9
 
 
73d4a73
 
9a78c86
73d4a73
 
68394f9
1c8febf
3dec2d1
 
5e06490
759a131
 
 
 
 
 
 
 
 
 
3dec2d1
759a131
 
 
a9ac28a
c972f99
759a131
 
 
 
 
9ef03c3
e840be8
3dec2d1
a9ac28a
73d4a73
a9ac28a
 
73d4a73
3dec2d1
bbef3ac
5e06490
759a131
dd62ca3
4c97ef8
1c8febf
 
 
759a131
 
3dec2d1
759a131
9a78c86
3dec2d1
1588190
0cc2838
95c5c80
1588190
3e6cabd
95c5c80
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102

from random import randint
from transformers import pipeline, set_seed
import requests
import json
import gradio as gr
# # from transformers import AutoModelForCausalLM, AutoTokenizer

# stage, commit, push

# # prompt = "In a shocking finding, scientists discovered a herd of unicorns living in a remote, " \
# #          "previously unexplored valley, in the Andes Mountains. Even more surprising to the " \
# #          "researchers was the fact that the unicorns spoke perfect English."

# ex=None
# try:
#     from transformers import AutoModelForCausalLM, AutoTokenizer
#     tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")

#     # "EluttherAI" on this line and for the next occurrence only
#     # tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
#     # model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
# except Exception as e:
#     ex = e

# Sampling-control widgets shared by the Gradio interface below
# (legacy Gradio 2.x `gr.inputs` API, used consistently in this file).
temperature = gr.inputs.Slider(minimum=0, maximum=1.5, default=0.8, label="temperature")
top_p = gr.inputs.Slider(minimum=0, maximum=1.0, default=0.9, label="top_p")

# TODO: gradio checkbutton

# Local GPT-2 text-generation pipeline, used for the "GPT-2" model choice.
generator = pipeline('text-generation', model='gpt2')


title = "GPT-J-6B/GPT-2 based text generator"

# Pre-filled example rows for the interface. Each row mirrors the input
# order of `f`: [context], temperature, top_p, max length, model choice.
examples = [
    [
        ["For today's homework assignment, please describe the reasons for the US Civil War."],
        0.8, 0.9, 50, "GPT-2",
    ],
    [
        ["In a shocking discovery, scientists have found a herd of unicorns living in a remote, previously unexplored valley, in the Andes Mountains. Even more surprising to the researchers was the fact that the unicorns spoke perfect English."],
        0.8, 0.9, 50, "GPT-2",
    ],
    [
        ["The first step in the process of developing a new language is to invent a new word."],
        0.8, 0.9, 50, "GPT-2",
    ],
]


def f(context, temperature, top_p, max_length, model_idx):
    """Generate a continuation of *context* with GPT-J-6B (remote) or GPT-2 (local).

    Args:
        context: prompt string to continue.
        temperature: sampling temperature (0 .. 1.5).
        top_p: nucleus-sampling cutoff (0 .. 1.0).
        max_length: maximum generation length in tokens.
        model_idx: dropdown index — 0 selects the remote GPT-J-6B API,
            anything else the local GPT-2 pipeline.

    Returns:
        The generated text, or an error-description string: the Gradio
        "text" output expects plain text, so failures are reported
        rather than raised.
    """
    try:
        # use GPT-J-6B
        if model_idx == 0:

            # http://api.vicgalle.net:5000/docs#/default/generate_generate_post
            # https://pythonrepo.com/repo/vicgalle-gpt-j-api-python-natural-language-processing

            payload = {
                "context": context,
                "token_max_length": max_length,  # 512,
                "temperature": temperature,
                "top_p": top_p,
            }

            # The endpoint takes its arguments as query parameters, so pass
            # the dict directly via `params=`. (json.dumps-ing it first would
            # make requests append one unusable JSON string to the URL.)
            response = requests.post(
                "http://api.vicgalle.net:5000/generate", params=payload).json()
            return response['text']
        else:
            # use GPT-2
            # # could easily use the inference API in /gptinference.py but don't know if it supports length>250
            # Re-seed so repeated clicks yield fresh samples.
            set_seed(randint(1, 2**31))
            # num_return_sequences specifies how many completions to return
            response = generator(context, max_length=max_length, top_p=top_p,
                             temperature=temperature, num_return_sequences=1)
            print(response)
            # The pipeline returns a list of dicts; extract the text so the
            # "text" output component receives a string, not a repr'd list.
            return response[0]['generated_text']

            # args found in the source: https://github.com/huggingface/transformers/blob/27b3031de2fb8195dec9bc2093e3e70bdb1c4bff/src/transformers/generation_tf_utils.py#L348-L376

    except Exception as e:
        # Best-effort UI: surface the failure as text instead of crashing Gradio.
        return f"error with idx{model_idx} : \n"+str(e)


# Wire everything into a Gradio interface: free-text prompt, the two
# sampling sliders defined above, a length slider, and a model dropdown.
# type="index" makes the dropdown pass 0 (GPT-J-6B) or 1 (GPT-2) to `f`.
length_slider = gr.inputs.Slider(
    minimum=20, maximum=512, default=30, label="max length")
model_dropdown = gr.inputs.Dropdown(
    ["GPT-J-6B", "GPT-2"], type="index", label="model")

iface = gr.Interface(
    f,
    ["text", temperature, top_p, length_slider, model_dropdown],
    outputs="text",
    title=title,
    examples=examples,
)
iface.launch()  # enable_queue=True

# all below works but testing
# import gradio as gr


# gr.Interface.load("huggingface/EleutherAI/gpt-j-6B",
#     inputs=gr.inputs.Textbox(lines=10, label="Input Text"),
#     title=title, examples=examples).launch();