Update app.py
Browse files
app.py
CHANGED
|
@@ -16,14 +16,14 @@ tokenizer = AutoTokenizer.from_pretrained(model_checkpoint1)
|
|
| 16 |
class MyModel(nn.Module):
|
| 17 |
def __init__(self):
|
| 18 |
super().__init__()
|
| 19 |
-
self.bert1 = EsmForSequenceClassification.from_pretrained(model_checkpoint1, num_labels=
|
| 20 |
# for param in self.bert1.parameters():
|
| 21 |
# param.requires_grad = False
|
| 22 |
self.bn1 = nn.BatchNorm1d(256)
|
| 23 |
self.bn2 = nn.BatchNorm1d(128)
|
| 24 |
self.bn3 = nn.BatchNorm1d(64)
|
| 25 |
self.relu = nn.LeakyReLU()
|
| 26 |
-
self.fc1 = nn.Linear(
|
| 27 |
self.fc2 = nn.Linear(256, 128)
|
| 28 |
self.fc3 = nn.Linear(128, 64)
|
| 29 |
self.output_layer = nn.Linear(64, 2)
|
|
|
|
| 16 |
class MyModel(nn.Module):
    """Classifier built on a pretrained ESM sequence-classification backbone.

    The backbone is configured with num_labels=3000, and fc1 takes a
    3000-dim input — the two constants must stay in sync (this commit's
    change was precisely aligning them).

    NOTE(review): only the __init__ hunk is visible in this diff view; the
    forward pass and any other methods lie outside this hunk, so how these
    layers are wired at runtime is not shown here. Presumably the backbone's
    3000-dim output feeds fc1 -> fc2 -> fc3 -> output_layer — confirm
    against the full app.py.
    """

    def __init__(self):
        super().__init__()
        # Pretrained ESM model; num_labels=3000 must match fc1's in_features below.
        # model_checkpoint1 is defined elsewhere in app.py (outside this hunk).
        self.bert1 = EsmForSequenceClassification.from_pretrained(model_checkpoint1, num_labels=3000)  # 3000
        # Backbone freezing is deliberately left disabled (kept from the original):
        # for param in self.bert1.parameters():
        #     param.requires_grad = False
        # Batch-norm layers sized to the outputs of fc1/fc2/fc3 respectively.
        self.bn1 = nn.BatchNorm1d(256)
        self.bn2 = nn.BatchNorm1d(128)
        self.bn3 = nn.BatchNorm1d(64)
        self.relu = nn.LeakyReLU()
        # Head: 3000 (= backbone num_labels) -> 256 -> 128 -> 64 -> 2 classes.
        self.fc1 = nn.Linear(3000, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 64)
        self.output_layer = nn.Linear(64, 2)
|