Bug fix in the MLP layer: torch.squeeze was removing the batch size when equal to 1 (facebookresearch#179)

Summary: The fix comes with unit tests so that the error will be caught if it ever comes back.

Pull Request resolved: facebookresearch#179
Reviewed By: prigoyal
Differential Revision: D26338689
Pulled By: QuentinDuval
fbshipit-source-id: be9aaf668faf046060df321b62a462d79c7b0d8b
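The underlying issue is that an unconditional torch.squeeze removes every dimension of size 1, including the batch dimension, so a batch of one sample loses its batch axis. Below is a minimal sketch of the failure mode together with a shape-safe alternative; the diff for the MLP head itself is not shown on this page, so the flatten-based variant is an assumption about the fix, not a quote of it.

import torch

x = torch.randn(1, 2048, 1, 1)  # batch size 1, trailing spatial dims of size 1

# Buggy pattern: squeeze drops ALL size-1 dims, including the batch dim
bad = torch.squeeze(x)
print(bad.shape)   # torch.Size([2048]) -- batch dimension is gone

# Shape-safe alternative (assumed fix, not taken verbatim from this commit):
# flatten everything after the batch dimension instead of squeezing
good = x.flatten(start_dim=1)
print(good.shape)  # torch.Size([1, 2048]) -- batch dimension preserved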
1 parent 471d73f · commit 75d472b
Showing 3 changed files with 67 additions and 5 deletions.
@@ -0,0 +1,61 @@
import unittest

import torch
from vissl.models.heads import LinearEvalMLP, MLP
from vissl.utils.hydra_config import AttrDict


class TestMLP(unittest.TestCase):
    """
    Unit tests to verify the correct construction of MLP layers
    and linear evaluation MLP layers
    """

    MODEL_CONFIG = AttrDict({
        "HEAD": {
            "BATCHNORM_EPS": 1e-6,
            "BATCHNORM_MOMENTUM": 0.99,
            "PARAMS_MULTIPLIER": 1.0,
        }
    })

    def test_mlp(self):
        mlp = MLP(self.MODEL_CONFIG, dims=[2048, 100])

        # Standard batch: the output keeps the batch dimension
        x = torch.randn(size=(4, 2048))
        out = mlp(x)
        assert out.shape == torch.Size([4, 100])

        # Batch size 1: the regression this commit fixes
        x = torch.randn(size=(1, 2048))
        out = mlp(x)
        assert out.shape == torch.Size([1, 100])

    def test_mlp_reshaping(self):
        mlp = MLP(self.MODEL_CONFIG, dims=[2048, 100])

        # Trailing spatial dimensions of size 1 are flattened away
        x = torch.randn(size=(1, 2048, 1, 1))
        out = mlp(x)
        assert out.shape == torch.Size([1, 100])

    def test_mlp_catch_bad_shapes(self):
        mlp = MLP(self.MODEL_CONFIG, dims=[2048, 100])

        # Spatial dimensions bigger than 1 must be rejected, not squeezed
        x = torch.randn(size=(1, 2048, 2, 1))
        with self.assertRaises(AssertionError) as context:
            mlp(x)
        assert context.exception is not None

    def test_eval_mlp_shape(self):
        eval_mlp = LinearEvalMLP(
            self.MODEL_CONFIG,
            in_channels=2048,
            dims=[2048 * 2 * 2, 1000],
        )

        # Feature map shaped like a ResNet trunk output, batch size 4
        resnet_feature_map = torch.randn(size=(4, 2048, 2, 2))
        out = eval_mlp(resnet_feature_map)
        assert out.shape == torch.Size([4, 1000])

        # Batch size 1 must be preserved here as well
        resnet_feature_map = torch.randn(size=(1, 2048, 2, 2))
        out = eval_mlp(resnet_feature_map)
        assert out.shape == torch.Size([1, 1000])
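The tests run under the standard unittest runner. The module path below is an assumption, since the diff view does not show the new file's name:

python -m unittest -v tests.test_mlp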