Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

【Fix PIR Unittest BUAA】fix part test_where_op.py #66399

Merged
merged 1 commit into from
Jul 24, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 57 additions & 30 deletions test/deprecated/legacy_test/test_where_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,19 +140,24 @@ def test_api(self, use_cuda=False):
cond = paddle.static.data(
name='cond', shape=[-1] + self.shape, dtype='bool'
)
cond.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
cond.desc.set_need_check_feed(False)
x = paddle.static.data(
name='x', shape=[-1] + self.shape, dtype='float32'
)
x.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
x.desc.set_need_check_feed(False)
y = paddle.static.data(
name='y', shape=[-1] + self.shape, dtype='float32'
)
y.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
y.desc.set_need_check_feed(False)
x.stop_gradient = x_stop_gradient
x.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
x.desc.set_need_check_feed(False)
y.stop_gradient = y_stop_gradient
y.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
y.desc.set_need_check_feed(False)
result = paddle.where(cond, x, y)
result.stop_gradient = False
append_backward(paddle.mean(result))
Expand All @@ -163,29 +168,46 @@ def test_api(self, use_cuda=False):
base.CUDAPlace(0) if use_cuda else base.CPUPlace()
)
exe = base.Executor(place)
fetch_list = [result, result.grad_name]
if x_stop_gradient is False:
fetch_list.append(x.grad_name)
if y_stop_gradient is False:
fetch_list.append(y.grad_name)
out = exe.run(
paddle.static.default_main_program(),
feed={'cond': self.cond, 'x': self.x, 'y': self.y},
fetch_list=fetch_list,
)
np.testing.assert_array_equal(out[0], self.out)
if x_stop_gradient is False:
np.testing.assert_array_equal(
out[2], self.ref_x_backward(out[1])
if paddle.framework.use_pir_api():
fetch_list = [result]
out = exe.run(
paddle.static.default_main_program(),
feed={
'cond': self.cond,
'x': self.x,
'y': self.y,
},
fetch_list=fetch_list,
)
if y.stop_gradient is False:
np.testing.assert_array_equal(out[0], self.out)
else:
fetch_list = [result, result.grad_name]
if x_stop_gradient is False:
fetch_list.append(x.grad_name)
if y_stop_gradient is False:
fetch_list.append(y.grad_name)
out = exe.run(
paddle.static.default_main_program(),
feed={
'cond': self.cond,
'x': self.x,
'y': self.y,
},
fetch_list=fetch_list,
)
np.testing.assert_array_equal(out[0], self.out)
if x_stop_gradient is False:
np.testing.assert_array_equal(
out[3], self.ref_y_backward(out[1])
out[2], self.ref_x_backward(out[1])
)
if y.stop_gradient is False:
np.testing.assert_array_equal(
out[3], self.ref_y_backward(out[1])
)
elif y.stop_gradient is False:
np.testing.assert_array_equal(
out[2], self.ref_y_backward(out[1])
)
elif y.stop_gradient is False:
np.testing.assert_array_equal(
out[2], self.ref_y_backward(out[1])
)

def test_pir_api(self, use_cuda=False):
for x_stop_gradient in [False, True]:
Expand Down Expand Up @@ -756,7 +778,8 @@ def test_where_condition(self):
data = np.array([[True, False], [False, True]])
with program_guard(Program(), Program()):
x = paddle.static.data(name='x', shape=[(-1), 2], dtype='float32')
x.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
x.desc.set_need_check_feed(False)
y = paddle.where(x)
self.assertEqual(type(y), tuple)
self.assertEqual(len(y), 2)
Expand All @@ -770,7 +793,8 @@ def test_where_condition(self):
data = np.array([True, True, False])
with program_guard(Program(), Program()):
x = paddle.static.data(name='x', shape=[(-1)], dtype='float32')
x.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
x.desc.set_need_check_feed(False)
y = paddle.where(x)
self.assertEqual(type(y), tuple)
self.assertEqual(len(y), 1)
Expand Down Expand Up @@ -808,15 +832,18 @@ def test_type():
x = paddle.static.data(
name='x', shape=[-1, 4], dtype='bool'
)
x.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
x.desc.set_need_check_feed(False)
y = paddle.static.data(
name='y', shape=[-1, 4], dtype='float16'
)
y.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
y.desc.set_need_check_feed(False)
cond = paddle.static.data(
name='cond', shape=[-1, 4], dtype='int32'
)
cond.desc.set_need_check_feed(False)
if not paddle.framework.use_pir_api():
cond.desc.set_need_check_feed(False)
paddle.where(cond, x, y)

self.assertRaises(TypeError, test_type)
Expand Down