Little refactorings for cleaner code #9

Open · wants to merge 7 commits into base: master
README.md · 5 changes (3 additions, 2 deletions)
@@ -41,7 +41,7 @@ trainer = single_trainer.SingleTrainer(
label_index=0,
logits_dimension=2,
record_defaults=[0, 0, 0, 0],
- filename="model_search/data/testdata/csv_random_data.csv")),
+ filename="model_search/data/testdata/csv_random_data.csv"),
spec=constants.DEFAULT_DNN)

trainer.try_models(
@@ -153,7 +153,8 @@ class Block(object, metaclass=abc.ABCMeta):
output_tensors: A list of the output tensors.
"""

- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def is_input_order_important(self):
"""Is the order of the entries in the input tensor important.

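Note: this decorator swap recurs below in graph_architecture.py, blocks.py, base_tower_generator.py, and metadata.py. `abc.abstractproperty` has been documented as deprecated since Python 3.3; the replacement is stacking `@property` on `@abc.abstractmethod`, with `@property` outermost. A minimal standalone sketch of the pattern (the `ConcatBlock` subclass is hypothetical, not repo code):

```python
import abc


class Block(abc.ABC):
    """Abstract block, mirroring the base class in the diff above."""

    @property
    @abc.abstractmethod
    def is_input_order_important(self):
        """Whether the order of entries in the input tensor matters."""


class ConcatBlock(Block):
    """Hypothetical concrete block, used only to show the override."""

    @property
    def is_input_order_important(self):
        return True


print(ConcatBlock().is_input_order_important)  # True, read like an attribute
# Block() still raises TypeError: the abstract property was never overridden.
```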
model_search/architecture/architecture_utils.py · 1 change (0 additions, 1 deletion)
@@ -241,7 +241,6 @@ def fix_architecture_order(architecture, problem_type):
if problem_type != phoenix_spec_pb2.PhoenixSpec.CNN:
return architecture

- output_architecture = []
flattens = tuple(block for block in architecture
if "FLATTEN" in blocks.BlockType(block).name)
if not flattens:
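Note: this deletion is only safe if `output_architecture` is unconditionally rebound later in `fix_architecture_order`, before any read (the rest of the function is not shown in this hunk), which would make the deleted line a dead store. A generic sketch of that pattern, unrelated to this repo:

```python
def scale(values):
    result = []  # dead store: rebound on the next line before any read
    result = [v / max(values) for v in values]
    return result
```

If any code path reads the variable before the rebinding, the initializer is load-bearing and must stay.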
model_search/architecture/graph_architecture.py · 3 changes (2 additions, 1 deletion)
@@ -102,7 +102,8 @@ class Combiner(metaclass=abc.ABCMeta):
inputs, so we cannot make all Reducers output only one Tensor.
"""

- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def name(self):
"""Name of the Combiner."""

model_search/blocks.py · 3 changes (2 additions, 1 deletion)
@@ -61,7 +61,8 @@ def build(self, input_tensors, is_training, lengths=None):
output_tensors: A list of the output tensors.
"""

- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def is_input_order_important(self):
"""Is the order of the entries in the input tensor important.

model_search/blocks_test.py · 2 changes (1 addition, 1 deletion)
@@ -474,7 +474,7 @@ class BlocksImportTest(parameterized.TestCase, tf.test.TestCase):
def test_ability_to_import(self, block, input_rank):
# Force graph mode
with tf.compat.v1.Graph().as_default():
- input_tensor = tf.zeros([32] * (input_rank))
+ input_tensor = tf.zeros([32] * input_rank)

with arg_scope(architecture_utils.DATA_FORMAT_OPS, data_format='NHWC'):
with tf.compat.v1.variable_scope('scope_a'):
model_search/controller_test.py · 2 changes (1 addition, 1 deletion)
@@ -47,7 +47,7 @@ def _create_spec(replay_towers=0, ensemble_type="none"):
return output


- _TRIALS = [trial_module.Trial({"id": id + 1}) for id in range(105)]
+ _TRIALS = [trial_module.Trial({"id": id_ + 1}) for id_ in range(105)]


def _create_trials(num_trials):
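Note: `id` is a built-in (`id(obj)` returns the object's identity), so reusing it as a loop variable trips pylint's redefined-builtin check and misleads readers, even though a Python 3 comprehension variable is scoped to the comprehension itself. The trailing underscore is the usual rename for a name that collides with a builtin or keyword. A standalone sketch, with plain dicts standing in for `trial_module.Trial`:

```python
# Fine at runtime in Python 3 (the comprehension variable does not leak),
# but lint flags it and readers may assume the built-in is involved:
trials = [{"id": id + 1} for id in range(3)]

# The renamed version, as in the diff above:
trials = [{"id": id_ + 1} for id_ in range(3)]


def count_and_identify(items):
    id = 0  # here the shadowing is real for the entire function body
    for _ in items:
        id += 1
    return id, id(items)  # TypeError: 'int' object is not callable
```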
model_search/generators/base_tower_generator.py · 3 changes (2 additions, 1 deletion)
@@ -59,7 +59,8 @@ def _wait_for_chief(self, model_dir):
if not self._phoenix_spec.HasField("replay"):
self._metadata.unblock_stopped_infeasible_trial(my_id)

- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def generator_name(self):
"""The name of the generator."""

model_search/metadata/metadata.py · 3 changes (2 additions, 1 deletion)
@@ -19,7 +19,8 @@
class MetaData(object, metaclass=abc.ABCMeta):
"""Api for metadata storage."""

- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def name(self):
"""The name of the metadata storage handler."""

model_search/ops/svdf_conv.py · 1 change (1 addition, 0 deletions)
@@ -104,6 +104,7 @@ def __init__(self,
self.dropout = min(1., max(0., dropout))
self.num_filters = self.rank * self.units
self._dropout_mask = None
+ self.num_features = None
self.feature_kernel = []
self.time_kernel = []
self.bias = []
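Note: same motivation as the `self._oracle = None` addition in single_trainer.py at the end of this diff: declaring every instance attribute in `__init__`, even as a `None` placeholder, keeps the object's full shape in one place and satisfies pylint's attribute-defined-outside-init check. A minimal sketch, not the real svdf_conv layer:

```python
class SvdfLayer:
    """Toy stand-in for the SVDF layer touched above."""

    def __init__(self, units):
        self.units = units
        self.num_features = None  # placeholder; real value needs the input shape

    def build(self, input_shape):
        # Without the placeholder in __init__, this assignment would be the
        # attribute's first appearance (attribute-defined-outside-init).
        self.num_features = input_shape[-1]
```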
model_search/oss_trainer_lib.py · 2 changes (1 addition, 1 deletion)
@@ -170,7 +170,7 @@ def loss_and_metric_and_predictions_fn(provider):
if getattr(provider, "get_predictions_fn", None) is not None:
predictions_fn = provider.get_predictions_fn()

- return (loss_fn, metric_fn, predictions_fn)
+ return loss_fn, metric_fn, predictions_fn


def make_run_config(model_dir=None, use_tpu=False):
model_search/phoenix.py · 2 changes (1 addition, 1 deletion)
@@ -559,7 +559,7 @@ def _increment_global_step(self, train_op, train_steps, tower_name):

increment_amount = 1
tower_size = architecture_utils.get_architecture_size(tower_name)
- if (self._phoenix_spec.use_parameter_scaled_training and tower_size):
+ if self._phoenix_spec.use_parameter_scaled_training and tower_size:
train_step_per_block = max(
int(train_steps // self._phoenix_spec.maximum_depth), 1)
tower_train_steps = tower_size * train_step_per_block
model_search/search/categorical_harmonica.py · 1 change (0 additions, 1 deletion)
@@ -300,7 +300,6 @@ def get_suggestion(self, trials, hparams, my_trial_id=None, model_dir=None):
# Expanding to polynomial features
x = self._get_polynomial_expansion(feature_extender, x)

- zeroed_coeff = None
relevant_variables = []
for _ in range(self._num_of_restarts):
logging.info("Running linear regression..")
model_search/search/categorical_harmonica_test.py · 6 changes (3 additions, 3 deletions)
@@ -304,10 +304,10 @@ def test_extract_relevant_variables_indices(self):
output = algorithm._extract_relevant_variables_indices(
feature_extender, np.array([1, 1, 1] + [0] * 7 + [1]))
logging.info(output)
- self.assertAllEqual(output, set([0, 1, 2, 3]))
+ self.assertAllEqual(output, {0, 1, 2, 3})
output = algorithm._extract_relevant_variables_indices(
feature_extender, np.array([1, 0, 0] + [0] * 7 + [1]))
- self.assertAllEqual(output, set([2, 3]))
+ self.assertAllEqual(output, {2, 3})
output = algorithm._extract_relevant_variables_indices(
feature_extender, np.array([1, 0, 0] + [0] * 7 + [0]))
self.assertEmpty(output)
@@ -329,7 +329,7 @@ def test_get_good_architecture_with_relevant_variables(self):
expected_output,
algorithm._get_good_architecture(
feature_extender, 20, np.array([0, 0.5, 0.6, -0.2] + [0] * 33),
- set([3, 3, 3, 3])))
+ {3, 3, 3, 3}))


if __name__ == "__main__":
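Note: `{0, 1, 2, 3}` is the literal spelling of `set([0, 1, 2, 3])`: the same set, built without a throwaway list. One caveat in the last hunk: sets deduplicate, so `{3, 3, 3, 3}` is simply `{3}`. That was equally true of `set([3, 3, 3, 3])`, so behavior is unchanged, but writing `{3}` would state the intent more plainly. A quick sketch:

```python
assert {0, 1, 2, 3} == set([0, 1, 2, 3])
assert {3, 3, 3, 3} == {3} == set([3, 3, 3, 3])  # duplicates collapse
assert {} == dict()  # caveat: {} is an empty dict; the empty set is set()
```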
model_search/single_trainer.py · 1 change (1 addition, 0 deletions)
@@ -35,6 +35,7 @@ def __init__(self, data, spec):
self._spec = spec

self._tuner_id = "tuner-1"
+ self._oracle = None

def try_models(self, number_models, train_steps, eval_steps, root_dir,
batch_size, experiment_name, experiment_owner):