Spaces:
Sleeping
Sleeping
un-index
committed on
Commit
·
d5076c2
1
Parent(s):
ab2b147
app.py
CHANGED
|
@@ -194,7 +194,25 @@ def f(context, temperature, top_p, max_length, model_idx, SPACE_VERIFICATION_KEY
|
|
| 194 |
# TODO use fallback gpt-2 inference api for this as well
|
| 195 |
# TODO or just make it an option in the menu "GPT-2 inference"
|
| 196 |
else:
|
| 197 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 198 |
|
| 199 |
except Exception as e:
|
| 200 |
return f"error with idx{model_idx}: "+str(e)
|
|
|
|
| 194 |
# TODO use fallback gpt-2 inference api for this as well
|
| 195 |
# TODO or just make it an option in the menu "GPT-2 inference"
|
| 196 |
else:
|
| 197 |
+
API_URL = "https://api-inference.huggingface.co/models/distilgpt2"
|
| 198 |
+
generated_text=""
|
| 199 |
+
while (max_length > 0):
|
| 200 |
+
# NOTE see original implementation above for gpt-J-6B
|
| 201 |
+
payload = {"inputs": context, "parameters": {"max_new_tokens": max_length>250 and 250 or max_length, "temperature": temperature, "top_p": top_p}}
|
| 202 |
+
response = requests.request("POST", API_URL, data=json.dumps(payload), headers=headers)
|
| 203 |
+
context = json.loads(response.content.decode("utf-8"))
|
| 204 |
+
|
| 205 |
+
context = get_generated_text(context)
|
| 206 |
+
|
| 207 |
+
generated_text += context
|
| 208 |
+
max_length -= 250
|
| 209 |
+
|
| 210 |
+
# payload = {"inputs": context, "parameters":{
|
| 211 |
+
# "max_new_tokens":max_length, "temperature":temperature, "top_p":top_p}}
|
| 212 |
+
# data = json.dumps(payload)
|
| 213 |
+
# response = requests.request("POST", API_URL, data=data, headers=headers)
|
| 214 |
+
# generated_text = json.loads(response.content.decode("utf-8"))[0]['generated_text']
|
| 215 |
+
return generated_text#context #_context+generated_text
|
| 216 |
|
| 217 |
except Exception as e:
|
| 218 |
return f"error with idx{model_idx}: "+str(e)
|