shijisan committed on
Commit
b7ab4a4
·
verified ·
1 Parent(s): c0aa81c

Delete python

Browse files
Files changed (3) hide show
  1. python/Dockerfile +0 -12
  2. python/app.py +0 -41
  3. python/requirements.txt +0 -5
python/Dockerfile DELETED
@@ -1,12 +0,0 @@
1
- FROM python:3.12-slim
2
-
3
- RUN apt-get update && apt-get install -y git
4
-
5
- WORKDIR /app
6
-
7
- COPY requirements.txt .
8
- RUN pip install --no-cache-dir -r requirements.txt
9
-
10
- COPY app.py .
11
-
12
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
 
 
 
 
 
 
 
 
 
 
 
python/app.py DELETED
@@ -1,41 +0,0 @@
"""FastAPI service exposing POST /summarize, backed by a long-context T5 model.

Fixes over the original:
- `FastApi` does not exist in the fastapi package; the class is `FastAPI`
  (the original raised ImportError at startup).
- `CORSMiddleware` lives in `fastapi.middleware.cors`, not
  `fastapi.middleware` (second ImportError).
- `add_middleware` passed `allow_methods=["*"]` twice (a SyntaxError for a
  repeated keyword argument); the second was clearly meant to be
  `allow_headers=["*"]`.
- Removed unused imports (`Request`, `pipeline`).
"""

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

app = FastAPI()

# Open CORS policy: any origin, method, and header may call this API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Long-context summarization checkpoint (accepts up to 16384 input tokens).
# Loaded once at module import so every request reuses the same weights.
model_name = "pszemraj/long-t5-tglobal-base-16384-book-summary"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)


class InputText(BaseModel):
    # Raw text to be summarized.
    text: str


@app.post("/summarize")
async def summarize(input: InputText):
    """Summarize ``input.text`` and return the summary as a plain string.

    The input is truncated to the model's 16384-token context window;
    generation uses 4-beam search bounded to 50–1024 output tokens.
    """
    inputs = tokenizer(
        input.text,
        return_tensors="pt",
        max_length=16384,  # model's maximum supported context length
        truncation=True,
    )

    summary_ids = model.generate(
        inputs["input_ids"],
        max_length=1024,
        min_length=50,
        length_penalty=2.0,  # >1 favors longer summaries under beam search
        num_beams=4,
        early_stopping=True,
    )

    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
python/requirements.txt DELETED
@@ -1,5 +0,0 @@
# Runtime dependencies for the summarization API (python/app.py).
fastapi
uvicorn
transformers
torch
accelerate