configs updated
WangLibo1995 committed May 8, 2024
1 parent 637bd22 commit 0e7ed6f
Showing 11 changed files with 226 additions and 31 deletions.

config/loveda/dcswin.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.loveda_dataset import *
 from geoseg.models.DCSwin import dcswin_small
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 30
@@ -63,7 +63,7 @@
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}  # 0.1xlr for backbone
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=max_epoch, eta_min=1e-6)
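
The change repeated across these configs replaces catalyst's Lookahead and process_model_params with local copies in tools/utils (catalyst itself is dropped from requirements.txt below). The vendored implementations are not shown on this page; as a rough sketch of what such helpers typically look like, with names, signatures, and behavior assumed to mirror the catalyst originals:

    import re

    import torch

    def process_model_params(model, layerwise_params=None):
        # Assumed behavior: match each parameter name against the regex keys
        # of layerwise_params and attach the matching kwargs (lr,
        # weight_decay, ...) to that parameter's optimizer group.
        layerwise_params = layerwise_params or {}
        groups = []
        for name, param in model.named_parameters():
            if not param.requires_grad:
                continue
            options = {}
            for pattern, kwargs in layerwise_params.items():
                if re.match(pattern, name):
                    options.update(kwargs)
            groups.append({"params": [param], **options})
        return groups

    class Lookahead(torch.optim.Optimizer):
        # Assumed behavior: the Lookahead wrapper of Zhang et al. (2019).
        # Subclassing torch.optim.Optimizer keeps torch's LR schedulers
        # happy; attributes are set manually instead of calling
        # super().__init__().
        def __init__(self, optimizer, k=5, alpha=0.5):
            self.optimizer = optimizer
            self.param_groups = optimizer.param_groups
            self.state = optimizer.state
            self.defaults = dict(optimizer.defaults)
            self.k, self.alpha = k, alpha
            self._slow = {}   # slow weights, created lazily at first sync
            self._steps = 0

        def step(self, closure=None):
            # Inner optimizer updates the "fast" weights every step; every
            # k steps the slow weights move toward them and are copied back.
            loss = self.optimizer.step(closure)
            self._steps += 1
            if self._steps % self.k == 0:
                for group in self.param_groups:
                    for p in group["params"]:
                        if p not in self._slow:
                            self._slow[p] = p.data.clone()
                        slow = self._slow[p]
                        slow.add_(p.data - slow, alpha=self.alpha)
                        p.data.copy_(slow)
            return loss

        def zero_grad(self, set_to_none=True):
            self.optimizer.zero_grad(set_to_none=set_to_none)

This is a sketch under stated assumptions, not the repository's actual code; it does match the call pattern the configs use (process_model_params(net, layerwise_params=...) followed by Lookahead(base_optimizer)).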

config/loveda/unetformer.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.loveda_dataset import *
 from geoseg.models.UNetFormer import UNetFormer
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 30
@@ -79,7 +79,7 @@ def train_aug(img, mask):
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=max_epoch, eta_min=1e-6)

config/potsdam/dcswin.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.potsdam_dataset import *
 from geoseg.models.DCSwin import dcswin_small
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 from timm.scheduler.poly_lr import PolyLRScheduler
 
 # training hparam
@@ -96,7 +96,7 @@ def val_aug(img, mask):
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=10, T_mult=2)
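
Unlike the LoveDA configs, which anneal once over max_epoch, the Potsdam and Vaihingen configs use CosineAnnealingWarmRestarts. With T_0=10 and T_mult=2 the learning rate follows cosine cycles of 10, 20, then 40 epochs, snapping back to the base lr at epochs 10, 30, and 70. A standalone snippet (not from the repo, placeholder base lr) to print the schedule:

    import torch

    # Drive the scheduler with a dummy parameter to inspect the lr per epoch.
    p = torch.nn.Parameter(torch.zeros(1))
    opt = torch.optim.AdamW([p], lr=6e-4)
    sched = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(opt, T_0=10, T_mult=2)

    for epoch in range(35):
        opt.step()    # step the optimizer first to avoid the ordering warning
        sched.step()  # one scheduler step per epoch, as in these configs
        print(f"epoch {epoch + 1:3d}  lr {opt.param_groups[0]['lr']:.2e}")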

config/potsdam/ftunetformer.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.potsdam_dataset import *
 from geoseg.models.FTUNetFormer import ft_unetformer
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 45
@@ -64,7 +64,7 @@
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=15, T_mult=2)

config/potsdam/unetformer.py (8 changes: 3 additions & 5 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.potsdam_dataset import *
 from geoseg.models.UNetFormer import UNetFormer
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 45
@@ -17,8 +17,6 @@
 num_classes = len(CLASSES)
 classes = CLASSES
 
-test_time_aug = 'd4'
-output_mask_dir, output_mask_rgb_dir = None, None
 weights_name = "unetformer-r18-768crop-ms-e45"
 weights_path = "model_weights/potsdam/{}".format(weights_name)
 test_weights_name = "unetformer-r18-768crop-ms-e45"
@@ -64,7 +62,7 @@
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=15, T_mult=2)
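
Besides the import swap, this config alone drops the test_time_aug = 'd4' and output-mask keys, so test-time-augmentation choices presumably move out of the config and into the test scripts. For reference, 'd4' TTA means averaging predictions over the eight flip/rotation symmetries of the square, which the ttach package in requirements.txt provides; a sketch, with the wrapper usage assumed from ttach's documented API rather than taken from this repo:

    import torch
    import ttach as tta

    # Stand-in model: any module mapping (N, 3, H, W) -> (N, C, H, W) works;
    # a 1x1 conv substitutes for UNetFormer here.
    model = torch.nn.Conv2d(3, 6, kernel_size=1).eval()

    # Predict each input under all eight d4 transforms and merge the logits.
    tta_model = tta.SegmentationTTAWrapper(
        model, tta.aliases.d4_transform(), merge_mode="mean"
    )

    with torch.no_grad():
        pred = tta_model(torch.randn(1, 3, 64, 64))
    print(pred.shape)  # torch.Size([1, 6, 64, 64]), averaged over 8 views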

config/uavid/unetformer.py (6 changes: 3 additions & 3 deletions)

@@ -6,8 +6,8 @@
 from geoseg.losses import *
 from geoseg.datasets.uavid_dataset import *
 from geoseg.models.UNetFormer import UNetFormer
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 40
@@ -66,7 +66,7 @@
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=max_epoch)

config/vaihingen/dcswin.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.vaihingen_dataset import *
 from geoseg.models.DCSwin import dcswin_small
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 70
@@ -96,7 +96,7 @@ def val_aug(img, mask):
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=10, T_mult=2)

config/vaihingen/ftunetformer.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.vaihingen_dataset import *
 from geoseg.models.FTUNetFormer import ft_unetformer
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 45
@@ -64,7 +64,7 @@
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=15, T_mult=2)

config/vaihingen/unetformer.py (6 changes: 3 additions & 3 deletions)

@@ -2,8 +2,8 @@
 from geoseg.losses import *
 from geoseg.datasets.vaihingen_dataset import *
 from geoseg.models.UNetFormer import UNetFormer
-from catalyst.contrib.nn import Lookahead
-from catalyst import utils
+from tools.utils import Lookahead
+from tools.utils import process_model_params
 
 # training hparam
 max_epoch = 105
@@ -62,7 +62,7 @@
 
 # define the optimizer
 layerwise_params = {"backbone.*": dict(lr=backbone_lr, weight_decay=backbone_weight_decay)}
-net_params = utils.process_model_params(net, layerwise_params=layerwise_params)
+net_params = process_model_params(net, layerwise_params=layerwise_params)
 base_optimizer = torch.optim.AdamW(net_params, lr=lr, weight_decay=weight_decay)
 optimizer = Lookahead(base_optimizer)
 lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=15, T_mult=2)

requirements.txt (5 changes: 3 additions & 2 deletions)

@@ -1,5 +1,4 @@
 timm
-catalyst==20.09
 lightning
 albumentations
 ttach
@@ -9,4 +8,6 @@ opencv-python
 scipy
 matplotlib
 einops
-addict
\ No newline at end of file
+addict
+ftfy
+regex
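
The dependency change mirrors the config edits: catalyst==20.09 is removed (lightning, already listed, remains the training framework) and ftfy and regex are added. Since the configs above now import from tools.utils, a quick hypothetical smoke test, run from the repository root after reinstalling requirements, would confirm the helpers compose without catalyst installed; the call pattern below is the one the configs themselves use, while the toy model and values are placeholders:

    import torch

    # Should import cleanly with catalyst absent from the environment.
    from tools.utils import Lookahead, process_model_params

    net = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.Linear(8, 2))
    # Mirror the 0.1x backbone lr noted in the configs ("0.1xlr for backbone").
    layerwise_params = {"0.*": dict(lr=6e-5, weight_decay=0.01)}
    net_params = process_model_params(net, layerwise_params=layerwise_params)
    base_optimizer = torch.optim.AdamW(net_params, lr=6e-4, weight_decay=0.01)
    optimizer = Lookahead(base_optimizer)
    print(type(optimizer).__name__)  # Lookahead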

(One changed file did not load on the page and is not shown. The ten files above account for all 31 deletions but only 30 of the 226 additions, so the missing file contributes roughly 196 added lines, plausibly the tools/utils module the configs now import from.)