Skip to content

Commit

Permalink
update ptb to support remove build once; test=develop (PaddlePaddle#4100)
Browse files Browse the repository at this point in the history
  • Loading branch information
phlrain authored and zhiqiu committed Dec 23, 2019
1 parent 54f64c6 commit 64410f0
Showing 1 changed file with 2 additions and 7 deletions.
9 changed lines (2 additions and 7 deletions) in dygraph/ptb_lm/ptb_dy.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,13 +42,12 @@

class SimpleLSTMRNN(fluid.Layer):
def __init__(self,
name_scope,
hidden_size,
num_steps,
num_layers=2,
init_scale=0.1,
dropout=None):
super(SimpleLSTMRNN, self).__init__(name_scope)
super(SimpleLSTMRNN, self).__init__()
self._hidden_size = hidden_size
self._num_layers = num_layers
self._init_scale = init_scale
Expand Down Expand Up @@ -132,29 +131,26 @@ def forward(self, input_embedding, init_hidden=None, init_cell=None):

class PtbModel(fluid.Layer):
def __init__(self,
name_scope,
hidden_size,
vocab_size,
num_layers=2,
num_steps=20,
init_scale=0.1,
dropout=None):
super(PtbModel, self).__init__(name_scope)
super(PtbModel, self).__init__()
self.hidden_size = hidden_size
self.vocab_size = vocab_size
self.init_scale = init_scale
self.num_layers = num_layers
self.num_steps = num_steps
self.dropout = dropout
self.simple_lstm_rnn = SimpleLSTMRNN(
self.full_name(),
hidden_size,
num_steps,
num_layers=num_layers,
init_scale=init_scale,
dropout=dropout)
self.embedding = Embedding(
self.full_name(),
size=[vocab_size, hidden_size],
dtype='float32',
is_sparse=False,
Expand Down Expand Up @@ -286,7 +282,6 @@ def train_ptb_lm():
fluid.default_main_program().random_seed = seed
max_epoch = 1
ptb_model = PtbModel(
"ptb_model",
hidden_size=hidden_size,
vocab_size=vocab_size,
num_layers=num_layers,
Expand Down

0 comments on commit 64410f0

Please sign in to comment.