@inproceedings{fdi:010091540,
  title     = {Semantic segmentation of sparse irregular point clouds for leaf/wood discrimination},
  author    = {Bai, Yuchen and Durand, Jean-Baptiste and Vincent, Gr{\'e}goire and Forbes, Florence},
  language  = {English},
  abstract  = {LiDAR (Light Detection And Ranging) has become an essential part of the remote sensing toolbox used for biosphere monitoring. In particular, LiDAR provides the opportunity to map forest leaf area with unprecedented accuracy, while leaf area remains an important source of uncertainty in models of gas exchange between vegetation and the atmosphere. Unmanned Aerial Vehicles (UAVs) are easy to deploy and therefore allow frequent revisits to track the response of vegetation to climate change. However, miniature sensors mounted on UAVs usually provide point clouds of limited density, which are further affected by a strong decrease in density from the top to the bottom of the canopy due to progressively stronger occlusion. In such a context, discriminating leaf points from wood points presents a significant challenge, due in particular to strong class imbalance and spatially irregular sampling intensity. Here we introduce a neural network model based on the PointNet++ architecture that makes use of point geometry only (excluding any spectral information). To cope with local data sparsity, we propose an innovative sampling scheme that strives to preserve important local geometric information. We also propose a loss function adapted to the severe class imbalance. We show that our model outperforms state-of-the-art alternatives on UAV point clouds. We discuss possible future improvements, particularly regarding much denser point clouds acquired from below the canopy.},
  pages     = {48293--48313},
  booktitle = {NIPS '23: Proceedings of the 37th International Conference on Neural Information Processing Systems},
  year      = {2024},
  url       = {https://www.documentation.ird.fr/hor/fdi:010091540},
}