Commit · c6d9fb0
1 Parent(s): cf47e9d
Update app.py
app.py CHANGED
@@ -5,16 +5,16 @@ from huggingface_hub import Repository
 tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/MARBERT")
 model = AutoAdapterModel.from_pretrained("UBC-NLP/MARBERT")
 
-sarcasm_adapter = Repository(local_dir="sarcasm_adapter", clone_from="nehalelkaref/sarcasm_adapter")
-aoc3_adapter = Repository(local_dir="aoc3_adapter", clone_from="nehalelkaref/aoc3_adapter")
-aoc4_adapter = Repository(local_dir="aoc4_adapter", clone_from="nehalelkaref/aoc4_adapter")
-fusion_adapter = Repository(local_dir="fusion_adapter", clone_from="nehalelkaref/region_fusion")
+# sarcasm_adapter = Repository(local_dir="sarcasm_adapter", clone_from="nehalelkaref/sarcasm_adapter")
+# aoc3_adapter = Repository(local_dir="aoc3_adapter", clone_from="nehalelkaref/aoc3_adapter")
+# aoc4_adapter = Repository(local_dir="aoc4_adapter", clone_from="nehalelkaref/aoc4_adapter")
+# fusion_adapter = Repository(local_dir="fusion_adapter", clone_from="nehalelkaref/region_fusion")
 
-model.load_adapter("
-model.load_adapter("
-model.load_adapter("
+model.load_adapter("nehalelkaref/aoc3_adapter", set_active=True, with_head=False, source="hf")
+model.load_adapter("nehalelkaref/aoc4_adapter", set_active=True, with_head=False, source="hf")
+model.load_adapter("nehalelkaref/sarcasm_adapter", set_active=True, with_head=False, source="hf")
 
-model.load_adapter_fusion("
+model.load_adapter_fusion("nehalelkaref/region_fusion", with_head=True, set_active=True, source="hf")
 
 pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)
 
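For reference, a minimal usage sketch of the setup this commit arrives at. The imports are an assumption (adapter-transformers style, where AutoAdapterModel is exported by transformers); app.py's real import lines sit above this hunk and are not shown, and the example input is a placeholder rather than anything taken from the Space.

# Sketch only: imports are assumed, the loading calls are copied from the new
# app.py above, and the input sentence is a placeholder.
from transformers import AutoTokenizer, AutoAdapterModel, TextClassificationPipeline

tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/MARBERT")
model = AutoAdapterModel.from_pretrained("UBC-NLP/MARBERT")

# Load the three task adapters straight from the Hugging Face Hub (no heads),
# then the region_fusion layer with its classification head, all set active.
model.load_adapter("nehalelkaref/aoc3_adapter", set_active=True, with_head=False, source="hf")
model.load_adapter("nehalelkaref/aoc4_adapter", set_active=True, with_head=False, source="hf")
model.load_adapter("nehalelkaref/sarcasm_adapter", set_active=True, with_head=False, source="hf")
model.load_adapter_fusion("nehalelkaref/region_fusion", with_head=True, set_active=True, source="hf")

pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)

# Placeholder Arabic input; the returned labels depend on the fusion head's config.
print(pipe("جملة تجريبية للتصنيف"))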