from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch


class EmotionModel:
    """Multi-label emotion classifier based on the GoEmotions student model."""

    def __init__(self):
        self.model_name = "joeddav/distilbert-base-uncased-go-emotions-student"
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
        self.model = AutoModelForSequenceClassification.from_pretrained(self.model_name)
        self.labels = self.model.config.id2label

    def predict(self, text):
        inputs = self.tokenizer(text, return_tensors="pt", truncation=True, padding=True)
        with torch.no_grad():
            logits = self.model(**inputs).logits
        # Multi-label setup: apply a sigmoid per class and keep labels above the 0.4 threshold.
        probs = torch.sigmoid(logits)[0]
        return {
            self.labels[i]: float(probs[i])
            for i in range(len(probs)) if probs[i] > 0.4
        }


class SuicidalIntentModel:
    """Binary classifier estimating the probability of suicidal intent."""

    def __init__(self):
        self.model_name = "sentinet/suicidality"
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
        self.model = AutoModelForSequenceClassification.from_pretrained(self.model_name)

    def predict(self, text):
        inputs = self.tokenizer(text, return_tensors="pt", truncation=True, padding=True)
        with torch.no_grad():
            logits = self.model(**inputs).logits
        probs = torch.nn.functional.softmax(logits, dim=1)
        return float(probs[0][1])  # Probability of the suicidal-intent class
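

# Usage sketch (not part of the original file): a minimal example of how these two
# classes might be wired together. The sample text and the printed labels are
# illustrative assumptions, not behavior defined elsewhere in this repository.
if __name__ == "__main__":
    emotion_model = EmotionModel()
    intent_model = SuicidalIntentModel()

    text = "I feel so alone and I can't see a way forward."
    print("Emotions above threshold:", emotion_model.predict(text))
    print("Suicidal-intent probability:", intent_model.predict(text))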