# map-anything/configs/train_params/vggt_finetune.yaml
# author: aknapitsch
# initial commit of map anything demo (9507532)
# Hydra training-parameter config for finetuning VGGT: composes on top of
# train_params/default.yaml, then overrides learning rates for finetuning.
defaults:
  - default

# Use 10x lower lr for finetuning (relative to the base config)
lr: 1e-05
min_lr: 1e-07

# Optimizer parameters specific to submodules — each key is the dotted path
# of a submodule; its mapping overrides the optimizer settings for that part.
submodule_configs:
  # DINOv2 patch embedding: pretrained backbone, so use an even smaller lr
  # (~20x below the global finetuning lr) to avoid disturbing its features.
  model.aggregator.patch_embed:
    lr: 5e-07
    min_lr: 5e-09
    # Reuse the global schedule settings via OmegaConf interpolation so the
    # submodule stays in sync with the rest of the training run.
    warmup_epochs: ${train_params.warmup_epochs}
    weight_decay: ${train_params.weight_decay}
    schedule_type: ${train_params.schedule_type}