Uniform regularization; Visualization
Showing 5 changed files with 104 additions and 8 deletions.
@@ -0,0 +1,46 @@
import tensorflow as tf

from rare.utils import shape_utils


def tile_activation_maps_max_dimensions(maps, max_height, max_width):
  """Tiles as many activation maps as fit within the given output dimensions."""
  batch_size, map_height, map_width, map_depth = \
    shape_utils.combined_static_and_dynamic_shape(maps)
  num_rows = max_height // map_height
  num_cols = max_width // map_width
  return tile_activation_maps_rows_cols(maps, num_rows, num_cols)


def tile_activation_maps_rows_cols(maps, num_rows, num_cols):
  """Tiles activation maps into a grid of num_rows x num_cols cells.

  Args:
    maps: a tensor of shape [batch_size, map_height, map_width, map_depth]
    num_rows: number of map rows in the tiled output
    num_cols: number of map columns in the tiled output
  Returns:
    tiled_map: a tensor of shape
      [batch_size, map_height * num_rows, map_width * num_cols, 1]
  """
  batch_size, map_height, map_width, map_depth = \
    shape_utils.combined_static_and_dynamic_shape(maps)

  # Pad with zero-valued maps if there are fewer maps than grid cells,
  # otherwise slice off the maps that do not fit.
  num_maps = num_rows * num_cols
  padded_map = tf.cond(
    tf.greater(num_maps, map_depth),
    true_fn=lambda: tf.pad(maps, [[0, 0], [0, 0], [0, 0], [0, tf.maximum(num_maps - map_depth, 0)]]),
    false_fn=lambda: maps[:, :, :, :num_maps]
  )

  # reshape to [batch_size, map_height, map_width, num_rows, num_cols]
  reshaped_map = tf.reshape(padded_map,
                            [batch_size, map_height, map_width, num_rows, num_cols])

  # unstack columns and concat along the width dimension
  width_concated_maps = tf.concat(
    tf.unstack(reshaped_map, axis=4),  # => list of [batch_size, map_height, map_width, num_rows]
    axis=2)  # => [batch_size, map_height, map_width * num_cols, num_rows]

  # unstack rows and concat along the height dimension
  tiled_map = tf.concat(
    tf.unstack(width_concated_maps, axis=3),  # => list of [batch_size, map_height, map_width * num_cols]
    axis=1)  # => [batch_size, map_height * num_rows, map_width * num_cols]

  # restore a singleton channel dimension
  tiled_map = tf.expand_dims(tiled_map, axis=3)

  return tiled_map
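A minimal usage sketch of the tiling utility, assuming a TF1-style graph and that the module is importable as rare.utils.visualization_utils (the import path used in the test file below); the feature_maps tensor and the summary tag name are illustrative, not part of this commit:

import tensorflow as tf
from rare.utils import visualization_utils

# stand-in for real activation maps: [batch_size, height, width, depth]
feature_maps = tf.random_uniform([2, 32, 100, 16])
# tile as many maps as fit into a 512x512 image and log them as an image summary
tiled = visualization_utils.tile_activation_maps_max_dimensions(feature_maps, 512, 512)
tf.summary.image('ActivationMaps', tiled, max_outputs=1)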
@@ -0,0 +1,34 @@
import tensorflow as tf

from rare.utils import visualization_utils


class VisualizationUtilsTest(tf.test.TestCase):

  def test_tile_activation_maps_with_padding(self):
    test_maps = tf.random_uniform([64, 32, 100, 16])
    tiled_map = visualization_utils.tile_activation_maps_rows_cols(test_maps, 5, 5)

    with self.test_session() as sess:
      tiled_map_output = tiled_map.eval()
      self.assertAllEqual(tiled_map_output.shape, [64, 32 * 5, 100 * 5, 1])

  def test_tile_activation_maps_with_slicing(self):
    test_maps = tf.random_uniform([64, 32, 100, 16])
    tiled_map = visualization_utils.tile_activation_maps_rows_cols(test_maps, 5, 1)

    with self.test_session() as sess:
      tiled_map_output = tiled_map.eval()
      self.assertAllEqual(tiled_map_output.shape, [64, 32 * 5, 100 * 1, 1])

  def test_tile_activation_maps_max_sizes(self):
    test_maps = tf.random_uniform([64, 32, 100, 16])
    tiled_map = visualization_utils.tile_activation_maps_max_dimensions(
        test_maps, 512, 512)

    with self.test_session() as sess:
      tiled_map_output = tiled_map.eval()
      self.assertAllEqual(tiled_map_output.shape, [64, 512, 500, 1])


if __name__ == '__main__':
  tf.test.main()