un-index committed on
Commit
7c1c1eb
·
1 Parent(s): 0e7fd51
Files changed (1) hide show
  1. app.py +32 -20
app.py CHANGED
@@ -7,29 +7,41 @@ import gradio as gr
7
  # # "previously unexplored valley, in the Andes Mountains. Even more surprising to the " \
8
  # # "researchers was the fact that the unicorns spoke perfect English."
9
 
10
- ex=None
11
- try:
12
- from transformers import AutoModelForCausalLM, AutoTokenizer
13
- tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
14
 
15
- # "EluttherAI" on this line and for the next occurence only
16
- # tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
17
- # model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
18
- except Exception as e:
19
- ex = e
20
 
21
  def f(text):
22
- try:
23
- if ex:
24
- raise Exception("err from transformers import: \n"+str(ex))
25
- model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
26
- default_do_sample = False #True (default is True)
27
- input_ids = tokenizer(text, return_tensors="pt").input_ids
28
- gen_tokens = model.generate(input_ids, do_sample=default_do_sample, temperature=0.9, max_length=30)
29
- gen_text = tokenizer.batch_decode(gen_tokens)[0]
30
- return gen_text
31
- except Exception as e:
32
- return "err: \n" + str(e)
 
 
 
 
 
 
 
 
 
 
 
 
33
  # from transformers import AutoModelForCausalLM, AutoTokenizer
34
  # return text
35
  # def predict(text):
 
7
  # # "previously unexplored valley, in the Andes Mountains. Even more surprising to the " \
8
  # # "researchers was the fact that the unicorns spoke perfect English."
9
 
10
+ # ex=None
11
+ # try:
12
+ # from transformers import AutoModelForCausalLM, AutoTokenizer
13
+ # tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
14
 
15
+ # # "EluttherAI" on this line and for the next occurence only
16
+ # # tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
17
+ # # model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
18
+ # except Exception as e:
19
+ # ex = e
20
 
21
def f(text):
    """Generate a completion for *text* via the Hugging Face Inference API.

    Sends the prompt to the hosted EleutherAI/gpt-j-6B endpoint and returns
    the decoded JSON response (on success, a list of generation dicts;
    on an API-side problem, the API's error dict). On a local/network
    failure, returns an "err: ..." string, matching the error-reporting
    convention used elsewhere in this file.
    """
    import os
    import requests

    API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
    # SECURITY: an API token was previously hard-coded on this line and
    # committed to the repo — that token is leaked and must be revoked.
    # Read the token from the environment instead; the old value remains
    # only as a fallback so existing deploys keep working until the
    # secret is rotated, after which the fallback should be deleted.
    token = os.environ.get("HF_API_TOKEN", "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")
    headers = {"Authorization": "Bearer " + token}

    def query(payload):
        # timeout keeps a stalled connection from hanging the Gradio
        # worker indefinitely (requests has no default timeout)
        response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
        return response.json()

    try:
        return query(text)
    except Exception as e:
        # same best-effort error string the previous implementation used
        return "err: \n" + str(e)
33
+
34
+ # try:
35
+ # if ex:
36
+ # raise Exception("err from transformers import: \n"+str(ex))
37
+ # model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
38
+ # default_do_sample = False #True (default is True)
39
+ # input_ids = tokenizer(text, return_tensors="pt").input_ids
40
+ # gen_tokens = model.generate(input_ids, do_sample=default_do_sample, temperature=0.9, max_length=30)
41
+ # gen_text = tokenizer.batch_decode(gen_tokens)[0]
42
+ # return gen_text
43
+ # except Exception as e:
44
+ # return "err: \n" + str(e)
45
  # from transformers import AutoModelForCausalLM, AutoTokenizer
46
  # return text
47
  # def predict(text):