Add TAR and AR (castorini#172)
* Add TAR and AR
Ashutosh-Adhikari authored and daemon committed Jan 25, 2019
1 parent dc086e8 · commit d326e57
Showing 3 changed files with 8 additions and 4 deletions.
common/trainers/reuters_trainer.py (3 additions, 1 deletion)

@@ -58,7 +58,9 @@ def train_epoch(self, epoch):
     loss = F.binary_cross_entropy_with_logits(scores, batch.label.float())

     if hasattr(self.model, 'TAR') and self.model.TAR:
-        loss = loss + (rnn_outs[1:] - rnn_outs[:-1]).pow(2).mean()
+        loss = loss + self.model.TAR*(rnn_outs[1:] - rnn_outs[:-1]).pow(2).mean()
+    if hasattr(self.model, 'AR') and self.model.AR:
+        loss = loss + self.model.AR*(rnn_outs[:]).pow(2).mean()

     n_total += batch.batch_size
     train_acc = 100. * n_correct / n_total
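For context: TAR (temporal activation regularization) penalizes the squared difference between RNN activations at consecutive timesteps, while AR (activation regularization) penalizes the squared activations themselves, following Merity et al.'s LSTM regularization work; the trainer now scales each penalty by its own coefficient. Below is a minimal, self-contained sketch of the two penalties — the tensor shapes, coefficient values, and variable names are illustrative assumptions, not the repository's API:

import torch

torch.manual_seed(0)
# Dummy RNN activations: (seq_len, batch, hidden) -- shapes assumed for illustration.
rnn_outs = torch.randn(35, 20, 256)
base_loss = torch.tensor(0.7)   # stand-in for the BCE-with-logits loss above
tar_coeff, ar_coeff = 1.0, 2.0  # hypothetical values for --TAR and --AR

# TAR: discourage large jumps between consecutive timestep activations.
tar_penalty = (rnn_outs[1:] - rnn_outs[:-1]).pow(2).mean()
# AR: discourage large activation magnitudes outright.
ar_penalty = rnn_outs.pow(2).mean()

loss = base_loss + tar_coeff * tar_penalty + ar_coeff * ar_penalty
print(float(loss))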
lstm_regularization/args.py (2 additions, 1 deletion)

@@ -33,7 +33,8 @@ def get_args():
                             default=os.path.join(os.pardir, 'Castor-data', 'embeddings', 'word2vec'))
     parser.add_argument('--word_vectors_file', help='word vectors filename', default='GoogleNews-vectors-negative300.txt')
     parser.add_argument('--trained_model', type=str, default="")
-    parser.add_argument('--TAR', action='store_true')
+    parser.add_argument('--TAR', type=float, default=0.0, help="Hyperparameter for Temporal Activation Regularization")
+    parser.add_argument('--AR', type=float, default=0.0, help="Hyperparameter for Activation Regularization")
     parser.add_argument('--weight_decay', type=float, default=0)
     parser.add_argument('--beta_ema', type=float, default = 0, help="for temporal averaging")
     parser.add_argument('--wdrop', type=float, default=0.0, help="for weight-drop")
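Switching --TAR from a boolean flag to a float (and adding --AR) lets the same option carry the penalty weight while preserving the old behavior: with the 0.0 defaults, both coefficients are falsy, so the `if self.model.TAR:`-style checks in the trainer skip the penalties entirely. A small sketch of that falsiness check, using a standalone parser rather than the repository's get_args:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--TAR', type=float, default=0.0)
parser.add_argument('--AR', type=float, default=0.0)

args = parser.parse_args(['--TAR', '1.0'])
assert args.TAR == 1.0 and args.AR == 0.0
assert args.TAR and not args.AR  # 0.0 is falsy, so the AR penalty is skipped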
lstm_regularization/model.py (3 additions, 2 deletions)

@@ -17,6 +17,7 @@ def __init__(self, config):
         self.has_bottleneck_layer = config.bottleneck_layer
         self.mode = config.mode
         self.TAR = config.TAR
+        self.AR = config.AR
         self.beta_ema = config.beta_ema  ## Temporal averaging
         self.wdrop = config.wdrop  ## Weight dropping
         self.embed_droprate = config.embed_droprate  ## Embedding dropout
@@ -84,11 +85,11 @@ def forward(self, x, lengths=None):
         if self.has_bottleneck_layer:
             x = F.relu(self.fc1(x))
             # x = self.dropout(x)
-            if self.TAR:
+            if self.TAR or self.AR:
                 return self.fc2(x), rnn_outs.permute(1,0,2)
             return self.fc2(x)
         else:
-            if self.TAR:
+            if self.TAR or self.AR:
                 return self.fc1(x), rnn_outs.permute(1,0,2)
             return self.fc1(x)

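The forward() change means callers receive a tuple whenever either coefficient is non-zero, since the trainer needs the raw RNN activations to compute the penalties. A sketch of that return contract, using a hypothetical stand-in model rather than the repository's class:

import torch

class TinyModel:
    """Hypothetical stand-in illustrating the tuple-vs-tensor return contract."""
    def __init__(self, TAR=0.0, AR=0.0):
        self.TAR, self.AR = TAR, AR

    def forward(self, x):                      # x: (seq_len, batch, hidden)
        logits = x.mean(dim=(0, 2))            # stand-in for the fc layers
        if self.TAR or self.AR:
            # Hand activations back as (batch, seq_len, hidden) for the trainer.
            return logits, x.permute(1, 0, 2)
        return logits

x = torch.randn(35, 20, 256)
assert isinstance(TinyModel(AR=2.0).forward(x), tuple)
assert isinstance(TinyModel().forward(x), torch.Tensor)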
