Spaces: Runtime error

Commit 6220981 · Parent(s): b214254
Update app.py

app.py CHANGED
@@ -8,19 +8,19 @@ import torch
 
 #Summarization Fine Tune Model
 
-model_path
-tokenizer
-
+def summarize_text(text, model_path="leadingbridge/summarization"):
+    # Load the tokenizer and model
+    tokenizer = AutoTokenizer.from_pretrained(model_path)
+    model = T5ForConditionalGeneration.from_pretrained(model_path)
 
-def summarize_text(text):
-    text = "This is the initial testing text"
     # Tokenize the input text
-
-
-
-
-
-
+    inputs = tokenizer.encode(text, return_tensors="pt")
+
+    # Generate the summary
+    summary_ids = model.generate(inputs)
+    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
+
+    return summary
 
 # Sentiment Analysis Pre-Trained Model
 model_path = "leadingbridge/sentiment-analysis"
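For context, a minimal, hypothetical smoke test for the revised helper is sketched below. It assumes app.py is importable from the working directory, that its existing transformers imports (AutoTokenizer, T5ForConditionalGeneration) resolve, and that the leadingbridge/summarization checkpoint is T5-compatible; the sample text is invented for illustration and does not come from the commit.

# Hypothetical smoke test for the revised summarize_text() helper.
# Assumes app.py is on the import path and its transformers imports
# (AutoTokenizer, T5ForConditionalGeneration) are available.
from app import summarize_text

sample = (
    "Hugging Face Spaces hosts small machine-learning demos. "
    "This commit rewrites the summarization helper so it loads the "
    "tokenizer and model inside the function instead of at import time."
)

# Uses the default model_path baked into the new signature
print(summarize_text(sample))

# The checkpoint can also be overridden explicitly
print(summarize_text(sample, model_path="leadingbridge/summarization"))

One trade-off worth noting: because the new version loads the tokenizer and model inside the function, every call re-initializes them; caching them at module level, as the removed lines appear to have done, would avoid repeating that cost on each request.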