Update hyperparameters for model paths
oarriaga committed Nov 8, 2019
1 parent 59978a4 commit 06ccc8e
Showing 1 changed file with 23 additions and 18 deletions.
41 changes: 23 additions & 18 deletions examples/discovery_of_latent_keypoints/discover_latent_keypoints.py
@@ -5,13 +5,14 @@
 
 import json
 import argparse
 
 from paz.models import KeypointNetShared
 from paz.models import Projector
 from paz.pipelines import KeypointInference
 from paz.pipelines import KeypointSharedAugmentation
 from paz.core.sequencer import GeneratingSequencer
-from paz.optimization.callbacks import DrawInferences
 from paz.optimization import KeypointNetLoss
+from paz.optimization.callbacks import DrawInferences
+
 from tensorflow.keras.optimizers import Adam
 from tensorflow.keras.callbacks import CSVLogger, ModelCheckpoint
@@ -23,13 +24,19 @@
 
 description = 'Training script for learning latent 3D keypoints'
 parser = argparse.ArgumentParser(description=description)
+parser.add_argument(
+    '-op', '--obj_path',
+    default=os.path.join(
+        os.path.expanduser('~'),
+        '.keras/paz/datasets/ycb/models/035_power_drill/textured.obj'),
+    type=str, help='Path to the 3D OBJ model used for rendering')
 parser.add_argument('-cl', '--class_name', default='035_power_drill', type=str,
-                    help='``FERPlus``, ``FER`` or ``IMDB``')
+                    help='Class name to be added to model save path')
 parser.add_argument('-nk', '--num_keypoints', default=10, type=int,
                     help='Number of keypoints to be learned')
 parser.add_argument('-f', '--filters', default=64, type=int,
                     help='Number of filters in convolutional blocks')
-parser.add_argument('-bs', '--batch_size', default=5, type=int,
+parser.add_argument('-bs', '--batch_size', default=20, type=int,
                     help='Batch size for training')
 parser.add_argument('-lr', '--learning_rate', default=0.001, type=float,
                     help='Initial learning rate for Adam')
@@ -39,12 +46,10 @@
                     help='Number of epochs before doing early stopping')
 parser.add_argument('-pp', '--plateau_patience', default=3, type=int,
                     help='Number of epochs before reducing learning rate')
-parser.add_argument('-e', '--max_num_epochs', default=2, type=int,
+parser.add_argument('-e', '--max_num_epochs', default=10000, type=int,
                     help='Maximum number of epochs before finishing')
-parser.add_argument('-st', '--steps_per_epoch', default=3, type=int,
+parser.add_argument('-st', '--steps_per_epoch', default=1000, type=int,
                     help='Steps per epoch')
-parser.add_argument('-sa', '--save_path', default='trained_models/',
-                    type=str, help='Path for writing model weights and logs')
 parser.add_argument('-sh', '--sphere', default='full',
                     choices=['full', 'half'], type=str,
                     help='Flag for full sphere or top half for rendering')
@@ -64,16 +69,17 @@
                     help='Background color')
 parser.add_argument('-ap', '--alpha', default=0.1, type=float,
                     help='Alpha leaky-relu parameter')
+parser.add_argument('-nm', '--num_mean_samples', default=1000, type=int,
+                    help='Number of samples used to calculate keypoints mean')
+parser.add_argument('-sa', '--save_path',
+                    default=os.path.join(
+                        os.path.expanduser('~'), '.keras/paz/models'),
+                    type=str, help='Path for writing model weights and logs')
 args = parser.parse_args()
 
 
-class_name = '035_power_drill'
-OBJ_filepath = '.keras/altamira/datasets/models/%s/textured.obj' % class_name
-OBJ_filepath = os.path.join(os.path.expanduser('~'), OBJ_filepath)
-save_path = os.path.join(os.path.expanduser('~'), '.keras/paz/models/')
-
 # setting scene
-scene = MultiView(OBJ_filepath, (args.image_size, args.image_size),
+scene = MultiView(args.obj_path, (args.image_size, args.image_size),
                   args.y_fov, args.depth, args.sphere, args.roll,
                   args.translation, args.shift, args.light, args.background)
 focal_length = scene.camera.get_projection_matrix()[0, 0]
@@ -101,20 +107,19 @@
 # model compilation
 optimizer = Adam(args.learning_rate, amsgrad=True)
 model.compile(optimizer, losses, metrics)
-model_name = '_'.join([model.name, str(args.num_keypoints), class_name])
+model_name = '_'.join([model.name, str(args.num_keypoints), args.class_name])
 model.summary()
 
 # making directory for saving model weights and logs
-save_path = os.path.join(save_path, model_name)
+save_path = os.path.join(args.save_path, model_name)
 if not os.path.exists(save_path):
     os.makedirs(save_path)
 
 # setting callbacks
 log = CSVLogger(os.path.join(save_path, '%s.log' % model_name))
 stop = EarlyStopping('loss', patience=args.stop_patience, verbose=1)
 plateau = ReduceLROnPlateau('loss', patience=args.plateau_patience, verbose=1)
-inferencer = KeypointInference(
-    model.get_layer('keypointnet'), args.num_keypoints, to_BGR=True)
+inferencer = KeypointInference(model.get_layer('keypointnet'))
 images = (sequencer.__getitem__(0)[0][0] * 255).astype('uint8')
 draw = DrawInferences(save_path, images, inferencer)
 model_path = os.path.join(save_path, '%s_weights.hdf5' % model_name)
@@ -143,7 +148,7 @@
 # rendering multiple scenes for forward passing
 keypoints_set, projector = [], Projector(focal_length, True)
 print('Calculating mean and variance of discovered keypoints...')
-num_forward_passes = 1000  # number of samples for calculating the mean
+num_forward_passes = args.num_mean_samples
 sequencer.batch_size = 1  # changing batch size of sequencer
 progress_bar = Progbar(num_forward_passes)
 for batch_arg in range(num_forward_passes):
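For context, a minimal standalone sketch (not part of the commit) of how the relocated path defaults resolve after this change. It rebuilds only the arguments touched by the diff; 'keypointnet-shared' is a placeholder for model.name, which the full script takes from the instantiated KeypointNetShared model and which is not visible in this diff.

import os
import argparse

# Rebuild just the path-related arguments changed by this commit (sketch).
parser = argparse.ArgumentParser()
parser.add_argument(
    '-op', '--obj_path',
    default=os.path.join(
        os.path.expanduser('~'),
        '.keras/paz/datasets/ycb/models/035_power_drill/textured.obj'),
    type=str)
parser.add_argument('-cl', '--class_name', default='035_power_drill', type=str)
parser.add_argument('-nk', '--num_keypoints', default=10, type=int)
parser.add_argument(
    '-sa', '--save_path',
    default=os.path.join(os.path.expanduser('~'), '.keras/paz/models'),
    type=str)
args = parser.parse_args([])  # empty argv, so the defaults apply

# Placeholder for model.name; see note above.
model_name = '_'.join(['keypointnet-shared',
                       str(args.num_keypoints), args.class_name])
save_path = os.path.join(args.save_path, model_name)
print(args.obj_path)  # e.g. /home/<user>/.keras/paz/datasets/ycb/models/035_power_drill/textured.obj
print(save_path)      # e.g. /home/<user>/.keras/paz/models/keypointnet-shared_10_035_power_drill

The net effect of the commit: the old --save_path flag (whose 'trained_models/' default was shadowed by a hard-coded assignment) and the hard-coded OBJ and model paths are consolidated into the --obj_path and --save_path arguments, so all assets resolve under the user-controllable ~/.keras/paz directory.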
