import gradio as gr
from optimum.onnxruntime import ORTModelForSeq2SeqLM
from transformers import pipeline
import logging
import os
# Module logger; use getLogger so the logger is registered with the logging manager
logger = logging.getLogger('sc_summ')
logger.setLevel(logging.INFO)
# File handler (for logging to file)
local_dir = os.path.dirname(__file__)
file_handler = logging.FileHandler(os.path.join(local_dir, "sc_summ.log"), encoding='utf-8')
file_handler.setLevel(logging.INFO)
file_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
def get_summarizer():
    """Load the ONNX seq2seq model and wrap it in a summarization pipeline."""
    model = ORTModelForSeq2SeqLM.from_pretrained(
        "shorecode/t5-efficient-tiny-summarizer-general-purpose-v3"
    )
    return pipeline(
        "summarization",
        model=model,
        tokenizer="shorecode/t5-efficient-tiny-summarizer-general-purpose-v3",
    )
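# Load the pipeline once at import time so every request reuses the same model.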
SUMMARY_MODEL = get_summarizer()
def summarize(source):
    """Summarize the input text and return the summary string."""
    try:
        summary = SUMMARY_MODEL(
            source,
            max_new_tokens=4000,
            min_length=20,
            no_repeat_ngram_size=2,
            num_beams=3,
        )
        # The pipeline returns a list of dicts; extract the summary text.
        try:
            summary = summary[0]['summary_text']
        except (IndexError, KeyError):
            pass
        return summary
    except Exception as e:
        logger.error(f"An error occurred while summarizing: {e}")
        return "An error occurred"
# Unused by the Gradio interface below.
def greet(name):
    return "Hello " + name + "!!"
demo = gr.Interface(fn=summarize, inputs="text", outputs="text")
demo.launch()