@inproceedings{bd7f3234a3a7450cbbad1b7ca2caa904,
title = "Association of Camera and Radar Detections Using Neural Networks",
abstract = "Automotive radar and camera fusion relies on linear point transformations from one sensor's coordinate system to the other. However, these transformations cannot handle non-linear dynamics and are susceptible to sensor noise. Furthermore, they operate on a point-to-point basis, so they cannot capture all the characteristics of an object. This paper introduces a method that performs detection-to-detection association by projecting heterogeneous object features from the two sensors into a common high-dimensional space. We associate 2D bounding boxes and radar detections based on the Euclidean distance between their projections. Our method uses deep neural networks to transform feature vectors instead of single points. Therefore, we can leverage real-world data to learn non-linear dynamics and exploit several features to provide a better description of each object. We evaluate our association method against a traditional rule-based method, showing that it improves association accuracy and is more robust in complex scenarios with multiple objects.",
keywords = "radar, sensor fusion, camera",
author = "Fatseas, Konstantinos and Bekooij, {Marco J.G.}",
note = "Publisher Copyright: {\textcopyright} 2023 IEEE; IEEE Radar Conference (RadarConf 2023); Conference date: 01-05-2023 through 05-05-2023",
year = "2023",
month = jun,
day = "21",
doi = "10.1109/RadarConf2351548.2023.10149729",
language = "English",
series = "Proceedings of the IEEE Radar Conference",
publisher = "IEEE",
booktitle = "RadarConf23 - 2023 IEEE Radar Conference, Proceedings",
address = "United States",
}
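
For orientation, the following is a minimal sketch of the kind of learned association the abstract describes: two small neural networks project camera bounding-box features and radar detection features into a shared embedding space, and detections are then matched by Euclidean distance between their embeddings. This is an illustrative reconstruction, not the authors' implementation; the feature layouts, network sizes, embedding dimension, greedy nearest-neighbour matching, and the use of PyTorch are all assumptions, and the training loss and any distance gating are omitted.

# Illustrative sketch only -- not the paper's implementation.
# Assumption: camera features are [x, y, w, h, class_score] (5-D) and radar
# features are [range, azimuth, doppler, rcs] (4-D); both are projected into
# a shared 32-D embedding space and matched by Euclidean distance.
import torch
import torch.nn as nn


class Encoder(nn.Module):
    """Small MLP mapping a sensor-specific feature vector to the shared space."""

    def __init__(self, in_dim: int, embed_dim: int = 32):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(in_dim, 64),
            nn.ReLU(),
            nn.Linear(64, embed_dim),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x)


def associate(cam_emb: torch.Tensor, rad_emb: torch.Tensor) -> torch.Tensor:
    """Greedy nearest-neighbour association by Euclidean distance.

    cam_emb: (N, D) camera embeddings, rad_emb: (M, D) radar embeddings.
    Returns an (N,) tensor holding, for each camera bounding box, the index
    of the closest radar detection in the shared embedding space.
    """
    dists = torch.cdist(cam_emb, rad_emb)  # (N, M) pairwise Euclidean distances
    return dists.argmin(dim=1)


if __name__ == "__main__":
    cam_encoder = Encoder(in_dim=5)   # 2D bounding-box features (assumed layout)
    rad_encoder = Encoder(in_dim=4)   # radar detection features (assumed layout)

    cam_feats = torch.randn(3, 5)     # three camera detections
    rad_feats = torch.randn(4, 4)     # four radar detections

    matches = associate(cam_encoder(cam_feats), rad_encoder(rad_feats))
    print(matches)  # e.g. tensor([2, 0, 1]) -- values depend on the random inputs

In practice the two encoders would be trained on real-world paired data so that embeddings of the same physical object end up close together, which is what lets the learned projection absorb non-linear effects that a fixed point transformation cannot.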