
[Cleanup][B-14] Replace to_variable (#61574)
---------

Co-authored-by: Nyakku Shigure <sigure.qaq@gmail.com>
zade23 and SigureMo authored Feb 5, 2024
1 parent ca68a91 · commit da76b51
Show file tree
Hide file tree
Showing 10 changed files with 39 additions and 44 deletions.
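
The change is mechanical: every call to the deprecated paddle.base.dygraph.to_variable is replaced with the public paddle.to_tensor API, which also accepts NumPy arrays and returns a Tensor in dygraph mode. A minimal sketch of the before/after pattern (the array name and shape below are illustrative, not taken from the diff):

import numpy as np
import paddle

# Illustrative input; any NumPy array works here.
x_np = np.random.rand(2, 3).astype(np.float32)

# Old style removed by this commit:
#   x = paddle.base.dygraph.to_variable(x_np)

# New style used throughout this commit:
x = paddle.to_tensor(x_np)  # returns a paddle.Tensor in dygraph mode
print(x.shape)  # [2, 3]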
5 changes: 2 additions & 3 deletions test/dygraph_to_static/test_mnist.py
@@ -26,7 +26,6 @@
 
 import paddle
 from paddle import base
-from paddle.base.dygraph import to_variable
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
 from paddle.nn import Linear
 from paddle.optimizer import Adam
@@ -209,8 +208,8 @@ def train(self, to_static=False):
     .reshape(-1, 1)
 )
 
-img = to_variable(dy_x_data)
-label = to_variable(y_data)
+img = paddle.to_tensor(dy_x_data)
+label = paddle.to_tensor(y_data)
 
 label.stop_gradient = True
 prediction, acc, avg_loss = mnist(img, label=label)
2 changes: 1 addition & 1 deletion test/dygraph_to_static/test_mobile_net.py
@@ -645,7 +645,7 @@ def predict_dygraph(args, data):
 model.set_dict(model_dict)
 model.eval()
 
-pred_res = model(base.dygraph.to_variable(data))
+pred_res = model(paddle.to_tensor(data))
 
 return pred_res.numpy()

2 changes: 1 addition & 1 deletion test/legacy_test/test_mean_op.py
@@ -524,7 +524,7 @@ def test_base_api(self):
 
 with base.dygraph.guard():
     x_np = np.random.rand(10, 10).astype(np.float32)
-    x = base.dygraph.to_variable(x_np)
+    x = paddle.to_tensor(x_np)
     out = paddle.mean(x=x, axis=1)
     np.testing.assert_allclose(
         out.numpy(), np.mean(x_np, axis=1), rtol=1e-05
18 changes: 8 additions & 10 deletions test/legacy_test/test_merged_adam_op.py
@@ -47,16 +47,14 @@ def run_adam_op(
 paddle.disable_static()
 paddle.set_device(place)
 
-param_vars = [paddle.base.dygraph.to_variable(p) for p in params]
-grad_vars = [paddle.base.dygraph.to_variable(g) for g in grads]
-lr_vars = [paddle.base.dygraph.to_variable(l) for l in lrs]
-moment1_vars = [paddle.base.dygraph.to_variable(m) for m in moment1s]
-moment2_vars = [paddle.base.dygraph.to_variable(m) for m in moment2s]
-beta1_pow_vars = [paddle.base.dygraph.to_variable(b) for b in beta1_pows]
-beta2_pow_vars = [paddle.base.dygraph.to_variable(b) for b in beta2_pows]
-master_param_vars = [
-    paddle.base.dygraph.to_variable(m_p) for m_p in master_params
-]
+param_vars = [paddle.to_tensor(p) for p in params]
+grad_vars = [paddle.to_tensor(g) for g in grads]
+lr_vars = [paddle.to_tensor(l) for l in lrs]
+moment1_vars = [paddle.to_tensor(m) for m in moment1s]
+moment2_vars = [paddle.to_tensor(m) for m in moment2s]
+beta1_pow_vars = [paddle.to_tensor(b) for b in beta1_pows]
+beta2_pow_vars = [paddle.to_tensor(b) for b in beta2_pows]
+master_param_vars = [paddle.to_tensor(m_p) for m_p in master_params]
 
 if not use_merged:
     for i in range(len(param_vars)):
20 changes: 10 additions & 10 deletions test/legacy_test/test_meshgrid_op.py
@@ -269,8 +269,8 @@ def test_api_with_dygraph(self):
 ).astype('int32')
 
 with base.dygraph.guard():
-    tensor_3 = base.dygraph.to_variable(input_3)
-    tensor_4 = base.dygraph.to_variable(input_4)
+    tensor_3 = paddle.to_tensor(input_3)
+    tensor_4 = paddle.to_tensor(input_4)
     res_3, res_4 = paddle.tensor.meshgrid(tensor_3, tensor_4)
 
     np.testing.assert_array_equal(res_3.shape, [100, 200])
@@ -295,8 +295,8 @@ def test_api_with_dygraph_list_input(self):
 ).astype('int32')
 
 with base.dygraph.guard():
-    tensor_3 = base.dygraph.to_variable(input_3)
-    tensor_4 = base.dygraph.to_variable(input_4)
+    tensor_3 = paddle.to_tensor(input_3)
+    tensor_4 = paddle.to_tensor(input_4)
     res_3, res_4 = paddle.tensor.meshgrid([tensor_3, tensor_4])
 
     np.testing.assert_array_equal(res_3.shape, [100, 200])
@@ -321,8 +321,8 @@ def test_api_with_dygraph_tuple_input(self):
 ).astype('int32')
 
 with base.dygraph.guard():
-    tensor_3 = base.dygraph.to_variable(input_3)
-    tensor_4 = base.dygraph.to_variable(input_4)
+    tensor_3 = paddle.to_tensor(input_3)
+    tensor_4 = paddle.to_tensor(input_4)
     res_3, res_4 = paddle.tensor.meshgrid((tensor_3, tensor_4))
 
     np.testing.assert_array_equal(res_3.shape, [100, 200])
@@ -372,15 +372,15 @@ def test_dygraph_api(self):
 ).astype('int32')
 
 with base.dygraph.guard():
-    tensor_1 = base.dygraph.to_variable(input_1)
-    tensor_2 = base.dygraph.to_variable(input_2)
+    tensor_1 = paddle.to_tensor(input_1)
+    tensor_2 = paddle.to_tensor(input_2)
     tensor_1.stop_gradient = False
     tensor_2.stop_gradient = False
     res_1, res_2 = paddle.tensor.meshgrid((tensor_1, tensor_2))
     sum = paddle.add_n([res_1, res_2])
     sum.backward()
-    tensor_eager_1 = base.dygraph.to_variable(input_1)
-    tensor_eager_2 = base.dygraph.to_variable(input_2)
+    tensor_eager_1 = paddle.to_tensor(input_1)
+    tensor_eager_2 = paddle.to_tensor(input_2)
     tensor_eager_1.stop_gradient = False
     tensor_eager_2.stop_gradient = False
     res_eager_1, res_eager_2 = paddle.tensor.meshgrid(
12 changes: 6 additions & 6 deletions test/legacy_test/test_mse_loss.py
@@ -115,8 +115,8 @@ def test_NNMseLoss_mean(self):
 with base.dygraph.guard():
     mse_loss = paddle.nn.loss.MSELoss()
     dy_ret = mse_loss(
-        base.dygraph.to_variable(input_np),
-        base.dygraph.to_variable(label_np),
+        paddle.to_tensor(input_np),
+        paddle.to_tensor(label_np),
     )
     dy_result = dy_ret.numpy()

@@ -160,8 +160,8 @@ def test_NNMseLoss_sum(self):
 with base.dygraph.guard():
     mse_loss = paddle.nn.loss.MSELoss(reduction='sum')
     dy_ret = mse_loss(
-        base.dygraph.to_variable(input_np),
-        base.dygraph.to_variable(label_np),
+        paddle.to_tensor(input_np),
+        paddle.to_tensor(label_np),
     )
     dy_result = dy_ret.numpy()

@@ -205,8 +205,8 @@ def test_NNMseLoss_none(self):
 with base.dygraph.guard():
     mse_loss = paddle.nn.loss.MSELoss(reduction='none')
     dy_ret = mse_loss(
-        base.dygraph.to_variable(input_np),
-        base.dygraph.to_variable(label_np),
+        paddle.to_tensor(input_np),
+        paddle.to_tensor(label_np),
     )
     dy_result = dy_ret.numpy()

2 changes: 1 addition & 1 deletion test/legacy_test/test_nansum_api.py
@@ -136,7 +136,7 @@ def test_dygraph(self):
     [[float('nan'), 3, 5, 9], [1, 2, float('-nan'), 7]]
 ).astype(np.float32)
 with base.dygraph.guard():
-    inputs = base.dygraph.to_variable(x)
+    inputs = paddle.to_tensor(x)
     out = paddle.nansum(inputs)
     out_ref = np.array([27]).astype(np.float32)

2 changes: 1 addition & 1 deletion test/legacy_test/test_nn_functional_hot_op.py
@@ -180,7 +180,7 @@ def test_api_with_dygraph(self):
 ).reshape([6, 1])
 with base.dygraph.guard():
     one_hot_label = functional.one_hot(
-        x=base.dygraph.to_variable(label), num_classes=num_classes
+        x=paddle.to_tensor(label), num_classes=num_classes
     )


2 changes: 1 addition & 1 deletion test/legacy_test/test_nonzero_api.py
@@ -90,7 +90,7 @@ def test_nonzero_api(self):
 def test_dygraph_api(self):
     data_x = np.array([[True, False], [False, True]])
     with base.dygraph.guard():
-        x = base.dygraph.to_variable(data_x)
+        x = paddle.to_tensor(data_x)
         z = paddle.nonzero(x)
         np_z = z.numpy()
         expect_out = np.array([[0, 0], [1, 1]])
18 changes: 8 additions & 10 deletions test/xpu/test_merged_adam_op_xpu.py
@@ -55,16 +55,14 @@ def run_adam_op(
 paddle.disable_static()
 paddle.set_device(place)
 
-param_vars = [paddle.base.dygraph.to_variable(p) for p in params]
-grad_vars = [paddle.base.dygraph.to_variable(g) for g in grads]
-lr_vars = [paddle.base.dygraph.to_variable(l) for l in lrs]
-moment1_vars = [paddle.base.dygraph.to_variable(m) for m in moment1s]
-moment2_vars = [paddle.base.dygraph.to_variable(m) for m in moment2s]
-beta1_pow_vars = [paddle.base.dygraph.to_variable(b) for b in beta1_pows]
-beta2_pow_vars = [paddle.base.dygraph.to_variable(b) for b in beta2_pows]
-master_param_vars = [
-    paddle.base.dygraph.to_variable(m_p) for m_p in master_params
-]
+param_vars = [paddle.to_tensor(p) for p in params]
+grad_vars = [paddle.to_tensor(g) for g in grads]
+lr_vars = [paddle.to_tensor(l) for l in lrs]
+moment1_vars = [paddle.to_tensor(m) for m in moment1s]
+moment2_vars = [paddle.to_tensor(m) for m in moment2s]
+beta1_pow_vars = [paddle.to_tensor(b) for b in beta1_pows]
+beta2_pow_vars = [paddle.to_tensor(b) for b in beta2_pows]
+master_param_vars = [paddle.to_tensor(m_p) for m_p in master_params]
 
 if not use_merged:
     for i in range(len(param_vars)):
