un-index committed · cb49423
1 Parent(s): cbabcb5
app.py CHANGED
@@ -194,12 +194,12 @@ def f(context, temperature, top_p, max_length, model_idx, SPACE_VERIFICATION_KEY
     # TODO use fallback gpt-2 inference api for this as well
     # TODO or just make it an option in the menu "GPT-2 inference"
     else:
-
+        DISTIL_GPT2_API_URL = "https://api-inference.huggingface.co/models/distilgpt2"
         generated_text=""
         while (max_length > 0):
             # NOTE see original implementation above for gpt-J-6B
             payload = {"inputs": context, "parameters": {"max_new_tokens": 250, "temperature": temperature, "top_p": top_p}}
-            response = requests.request("POST",
+            response = requests.request("POST", DISTIL_GPT2_API_URL, data=json.dumps(payload), headers=headers)
             context = json.loads(response.content.decode("utf-8"))

             context = get_generated_text(context)
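
For reference, a minimal standalone sketch of the fallback path this commit wires up: it POSTs to the hosted DistilGPT-2 inference endpoint in a loop and feeds each completion back in as the next prompt. The headers dict, the HF_API_TOKEN environment variable, the get_generated_text helper, and the max_length -= 250 bookkeeping are illustrative assumptions, not part of this hunk.

    # Sketch of the DistilGPT-2 fallback loop (assumptions: HF_API_TOKEN for auth,
    # and the Inference API returning a list like [{"generated_text": ...}]).
    import json
    import os
    import requests

    DISTIL_GPT2_API_URL = "https://api-inference.huggingface.co/models/distilgpt2"
    headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

    def get_generated_text(response_json):
        # Assumed helper: pull the completion out of the API's JSON response.
        return response_json[0]["generated_text"]

    def generate_with_distilgpt2(context, temperature=1.0, top_p=0.9, max_length=500):
        # Request up to 250 new tokens per call until the max_length budget is spent,
        # using each completion as the next prompt, mirroring the loop in the diff.
        while max_length > 0:
            payload = {
                "inputs": context,
                "parameters": {"max_new_tokens": 250, "temperature": temperature, "top_p": top_p},
            }
            response = requests.request("POST", DISTIL_GPT2_API_URL, data=json.dumps(payload), headers=headers)
            context = get_generated_text(json.loads(response.content.decode("utf-8")))
            max_length -= 250  # assumed bookkeeping so the loop terminates
        return context

The design point of the change itself is small: the endpoint URL is bound to a named constant inside the else branch and passed explicitly to requests.request, instead of leaving the POST call without a target as in the removed line.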