Spaces: Running on CPU Upgrade
Commit 4fc786e · Peter committed · 1 Parent(s): 0078f3c

disable cacheing
app.py CHANGED
@@ -126,9 +126,9 @@ def load_examples(examples_dir="examples"):
 if __name__ == "__main__":
 
     model, tokenizer = load_model_and_tokenizer("pszemraj/led-large-book-summary")
-    title = "Long-form
+    title = "Long-form Summarization: LED & BookSum"
     description = (
-        "This is a simple example of using the LED model to summarize a long-form text."
+        "This is a simple example of using the LED model to summarize a long-form text. This model is a fine-tuned version of [allenai/led-large-16384](https://huggingface.co/allenai/led-large-16384) on the booksum dataset. the goal was to create a model that can generalize well and is useful in summarizing lots of text in academic and daily usage."
     )
 
     gr.Interface(
@@ -160,4 +160,5 @@ if __name__ == "__main__":
         title=title,
         description=description,
         examples=load_examples(),
+        cache_examples=False,
     ).launch(enable_queue=True, share=True)
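For context, a minimal, self-contained sketch of how the newly added `cache_examples=False` flag fits into a Gradio `Interface` call. The `summarize` stand-in function, input/output components, and example text below are assumptions for illustration only, since the Space's actual summarization pipeline is not part of this diff; `enable_queue` simply mirrors the existing `launch()` call in the repo (it is deprecated in newer Gradio releases).

```python
import gradio as gr

# Hypothetical stand-in for the Space's LED summarization function;
# the real app loads pszemraj/led-large-book-summary and runs generation here.
def summarize(text: str) -> str:
    return text[:200]

gr.Interface(
    fn=summarize,
    inputs=gr.Textbox(lines=10, label="Long-form text"),
    outputs=gr.Textbox(label="Summary"),
    title="Long-form Summarization: LED & BookSum",
    examples=[["Paste a long document here ..."]],
    # The point of this commit: skip pre-computing example outputs at startup,
    # so the Space does not run the (slow) model once per example before serving.
    cache_examples=False,
).launch(enable_queue=True, share=True)
```

With caching enabled, Gradio would run the prediction function on every entry returned by `examples` at startup and store the results; disabling it avoids that per-example model run when the Space boots.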