from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

app = FastAPI()

# Allow cross-origin requests (e.g. from a separately hosted frontend).
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Long-T5 summarization model that accepts inputs of up to 16,384 tokens.
model_name = "pszemraj/long-t5-tglobal-base-16384-book-summary"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

class InputText(BaseModel):
    text: str

@app.post("/summarize")
async def summarize(input: InputText):
    # Tokenize the input, truncating to the model's maximum context length.
    inputs = tokenizer(
        input.text,
        return_tensors="pt",
        max_length=16384,
        truncation=True,
    )

    # Generate the summary with beam search.
    summary_ids = model.generate(
        inputs["input_ids"],
        max_length=1024,
        min_length=50,
        length_penalty=2.0,
        num_beams=4,
        early_stopping=True,
    )

    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary
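
# --- Usage sketch (not part of the app) ---
# A minimal client-side example, assuming the app is served locally with
# `uvicorn main:app --port 8000`; the module name "main", the port, and the
# sample text are assumptions for illustration only.
#
#     import requests
#
#     resp = requests.post(
#         "http://localhost:8000/summarize",
#         json={"text": "A very long document to summarize..."},
#     )
#     print(resp.json())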