@InProceedings{10.1007/978-3-030-87193-2_61,
author="Ho, Ngoc-Vuong
and Nguyen, Tan
and Diep, Gia-Han
and Le, Ngan
and Hua, Binh-Son",
editor="de Bruijne, Marleen
and Cattin, Philippe C.
and Cotin, St{\'e}phane
and Padoy, Nicolas
and Speidel, Stefanie
and Zheng, Yefeng
and Essert, Caroline",
title="Point-Unet: A Context-Aware Point-Based Neural Network for Volumetric Segmentation",
booktitle="Medical Image Computing and Computer Assisted Intervention -- MICCAI 2021",
year="2021",
publisher="Springer International Publishing",
address="Cham",
pages="644--655",
abstract="Medical image analysis using deep learning has recently been prevalent, showing great performance for various downstream tasks including medical image segmentation and its sibling, volumetric image segmentation. Particularly, a typical volumetric segmentation network strongly relies on a voxel grid representation which treats volumetric data as a stack of individual voxel `slices', which makes learning to segment a voxel grid as straightforward as extending existing image-based segmentation networks to the 3D domain. However, using a voxel grid representation requires a large memory footprint and expensive test-time computation, limiting the scalability of such solutions. In this paper, we propose Point-Unet, a novel method that incorporates the efficiency of deep learning with 3D point clouds into volumetric segmentation. Our key idea is to first predict the regions of interest in the volume by learning an attentional probability map, which is then used for sampling the volume into a sparse point cloud that is subsequently segmented using a point-based neural network. We have conducted experiments on the medical volumetric segmentation task with both a small-scale dataset, Pancreas, and the large-scale BraTS18, BraTS19, and BraTS20 challenge datasets. A comprehensive benchmark on different metrics shows that our context-aware Point-Unet robustly outperforms SOTA voxel-based networks in accuracy, memory usage during training, and time consumption during testing. Our code is available at https://github.com/VinAIResearch/Point-Unet.",
isbn="978-3-030-87193-2"
}