import streamlit as st
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# Load the model and tokenizer from Hugging Face
model_name = "KevSun/Personality_LM"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Streamlit app
st.title("Personality Prediction App")
st.write("Enter your text below to predict personality traits:")

# Input text from user
user_input = st.text_area("Your text here:")

if st.button("Predict"):
    if user_input:
        # Tokenize the input text (truncate long inputs to the model's maximum length)
        inputs = tokenizer(user_input, return_tensors="pt", truncation=True)

        # Get predictions from the model
        with torch.no_grad():
            outputs = model(**inputs)

        # Convert the logits to probabilities
        predictions = torch.nn.functional.softmax(outputs.logits, dim=-1)
        predictions = predictions[0].tolist()

        # Display one score per Big Five trait
        labels = ["Extraversion", "Agreeableness", "Conscientiousness", "Neuroticism", "Openness"]
        for label, score in zip(labels, predictions):
            st.write(f"{label}: {score:.4f}")
    else:
        st.write("Please enter some text to get predictions.")