@inproceedings{fdi:010070127,
  title     = {Manhole cover localization in aerial images with a deep learning approach},
  author    = {Commandre, B. and En-Nejjary, D. and Pibre, L. and Chaumont, M. and Delenne, C. and Chahinian, Nan{\'e}e},
  language  = {English},
  abstract  = {Urban growth is an ongoing trend and one of its direct consequences is the development of buried utility networks. Locating these networks is becoming a challenging task. While the labeling of large objects in aerial images is extensively studied in Geosciences, the localization of small objects (smaller than a building) is, by contrast, less studied and very challenging due to the variance of object colors, cluttered neighborhoods, non-uniform backgrounds, shadows and aspect ratios. In this paper, we put forward a method for the automatic detection and localization of manhole covers in Very High Resolution (VHR) aerial and remotely sensed images using a Convolutional Neural Network (CNN). Compared to other detection/localization methods for small objects, the proposed approach is more comprehensive as the entire image is processed without prior segmentation. The first experiments using the Prades-Le-Lez and Gigean datasets show that our method is indeed effective, as more than 49% of the ground truth database is detected with a precision of 75%. New improvement possibilities are being explored, such as using information on the shape of the detected objects and increasing the types of objects to be detected, thus enabling the extraction of more object-specific features.},
  keywords  = {URBANIZATION ; SPATIAL OCCUPATION ; IMAGE PROCESSING ; AERIAL PHOTOGRAPHY ; SPATIAL RESOLUTION ; FRANCE},
  number    = {42/W1},
  pages     = {333--338},
  booktitle = {ISPRS Hannover Workshop},
  year      = {2017},
  doi       = {10.5194/isprs-archives-XLII-1-W1-333-2017},
  url       = {https://www.documentation.ird.fr/hor/fdi:010070127},
}