
Commit cdc3386
add unit test
Wuziyi616 committed Aug 8, 2021
1 parent 140693b commit cdc3386
Showing 1 changed file with 72 additions and 0 deletions.
tests/test_models/test_segmentors.py (72 additions, 0 deletions)
@@ -231,3 +231,75 @@ def test_paconv_ssg():
    results = self.forward(return_loss=False, **data_dict)
    assert results[0]['semantic_mask'].shape == torch.Size([200])
    assert results[1]['semantic_mask'].shape == torch.Size([100])


def test_paconv_cuda_ssg():
    if not torch.cuda.is_available():
        pytest.skip('test requires GPU and torch+cuda')

    set_random_seed(0, True)
    paconv_cuda_ssg_cfg = _get_segmentor_cfg(
        'paconv/paconv_cuda_ssg_8x8_cosine_200e_s3dis_seg-3d-13class.py')
    # for GPU memory consideration
    paconv_cuda_ssg_cfg.backbone.num_points = (256, 64, 16, 4)
    paconv_cuda_ssg_cfg.test_cfg.num_points = 32
    self = build_segmentor(paconv_cuda_ssg_cfg).cuda()
    points = [torch.rand(1024, 9).float().cuda() for _ in range(2)]
    img_metas = [dict(), dict()]
    gt_masks = [torch.randint(0, 13, (1024, )).long().cuda() for _ in range(2)]

    # test forward_train
    losses = self.forward_train(points, img_metas, gt_masks)
    assert losses['decode.loss_sem_seg'].item() >= 0
    assert losses['regularize.loss_regularize'].item() >= 0

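    # forward() with return_loss=True should dispatch to forward_train, so
    # under the same random seed it is expected to reproduce the losses above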
    # test forward function
    set_random_seed(0, True)
    data_dict = dict(
        points=points, img_metas=img_metas, pts_semantic_mask=gt_masks)
    forward_losses = self.forward(return_loss=True, **data_dict)
    assert np.allclose(losses['decode.loss_sem_seg'].item(),
                       forward_losses['decode.loss_sem_seg'].item())
    assert np.allclose(losses['regularize.loss_regularize'].item(),
                       forward_losses['regularize.loss_regularize'].item())

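    # labels set to 13 fall outside the 13 valid classes (0-12), so they are
    # expected to be treated as the ignore label and contribute no semantic loss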
    # test loss with ignore_index
    ignore_masks = [torch.ones_like(gt_masks[0]) * 13 for _ in range(2)]
    losses = self.forward_train(points, img_metas, ignore_masks)
    assert losses['decode.loss_sem_seg'].item() == 0

    # test simple_test
    self.eval()
    with torch.no_grad():
        scene_points = [
            torch.randn(200, 6).float().cuda() * 3.0,
            torch.randn(100, 6).float().cuda() * 2.5
        ]
        results = self.simple_test(scene_points, img_metas)
        assert results[0]['semantic_mask'].shape == torch.Size([200])
        assert results[1]['semantic_mask'].shape == torch.Size([100])

    # test forward function calling simple_test
    with torch.no_grad():
        data_dict = dict(points=[scene_points], img_metas=[img_metas])
        results = self.forward(return_loss=False, **data_dict)
        assert results[0]['semantic_mask'].shape == torch.Size([200])
        assert results[1]['semantic_mask'].shape == torch.Size([100])

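    # aug_test receives two augmented views per scene and should merge them
    # into a single per-point prediction of the original scene size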
    # test aug_test
    with torch.no_grad():
        scene_points = [
            torch.randn(2, 200, 6).float().cuda() * 3.0,
            torch.randn(2, 100, 6).float().cuda() * 2.5
        ]
        img_metas = [[dict(), dict()], [dict(), dict()]]
        results = self.aug_test(scene_points, img_metas)
        assert results[0]['semantic_mask'].shape == torch.Size([200])
        assert results[1]['semantic_mask'].shape == torch.Size([100])

    # test forward function calling aug_test
    with torch.no_grad():
        data_dict = dict(points=scene_points, img_metas=img_metas)
        results = self.forward(return_loss=False, **data_dict)
        assert results[0]['semantic_mask'].shape == torch.Size([200])
        assert results[1]['semantic_mask'].shape == torch.Size([100])

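For reference, the new test can be selected on its own with the standard pytest node-id invocation below (an illustrative command, not part of the commit; it assumes a CUDA-capable GPU, otherwise the test skips itself):

    pytest tests/test_models/test_segmentors.py::test_paconv_cuda_ssg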