@article{Cesetti2011157,
  title = {A visual global positioning system for unmanned aerial vehicles used in photogrammetric applications},
  journal = {Journal of Intelligent and Robotic Systems: Theory and Applications},
  volume = {61},
  number = {1-4},
  year = {2011},
  note = {cited By 10},
  pages = {157-168},
  abstract = {The combination of photogrammetric aerial and terrestrial recording methods can provide new opportunities for photogrammetric applications. A UAV (Unmanned Aerial Vehicle), in our case a helicopter system, can cover both aerial and quasi-terrestrial image acquisition. A UAV can be equipped with an on-board high-resolution camera and a priori knowledge of the operating area in which photogrammetric tasks are to be performed. In this general scenario, our paper proposes vision-based techniques for localizing a UAV. Only natural landmarks provided by a feature tracking algorithm are considered, without the help of visual beacons or landmarks with known positions. The novel idea is to perform global localization, position tracking and localization failure recovery (kidnapping) based only on visual matching between the current view and available georeferenced satellite images. The matching is based on SIFT features, and the system estimates the position and altitude of the UAV on the basis of the reference image. The vision system replaces the GPS signal by combining position information from visual odometry with georeferenced imagery. Georeferenced satellite or aerial images must be available on board beforehand or downloaded during the flight. The growing availability of high-resolution satellite images (e.g., provided by Google Earth or other local information sources) makes this topic very interesting and timely. Experiments with both synthetic (i.e., taken from satellites or datasets and pre-processed) and real-world images have been performed to test the accuracy and robustness of our method. Results show sufficient performance compared with common GPS systems and also good performance in altitude estimation, although in the latter case only preliminary results are available. {\textcopyright} 2010 Springer Science+Business Media B.V.},
  doi = {10.1007/s10846-010-9489-5},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-79951511981\&partnerID=40\&md5=22fb7414e9a79d5c70082f7e0e8c0684},
  author = {Cesetti, A. and Frontoni, Emanuele and Mancini, Adriano and Ascani, A. and Zingaretti, Primo and Longhi, Sauro}
}

@conference{Cesetti2010125,
  title = {Autonomous safe landing of a vision guided helicopter},
  booktitle = {Proceedings of 2010 IEEE/ASME International Conference on Mechatronic and Embedded Systems and Applications, MESA 2010},
  year = {2010},
  note = {cited By 2},
  pages = {125-130},
  abstract = {In this paper a vision-based system for safe autonomous landing of a helicopter-based Unmanned Aerial Vehicle (UAV) is presented. The remote user selects target areas from high-resolution aerial or satellite images. These areas are tracked by a feature-based image matching algorithm that identifies natural landmarks and gives feedback for control purposes. {\textcopyright} 2010 IEEE.},
  doi = {10.1109/MESA.2010.5552081},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-77957363095\&partnerID=40\&md5=105d5126c8256e58ab2a67b66bd8c271},
  author = {Cesetti, A. and Frontoni, Emanuele and Mancini, Adriano and Zingaretti, Primo}
}
@article{Cesetti2010233,
  title = {A vision-based guidance system for UAV navigation and safe landing using natural landmarks},
  journal = {Journal of Intelligent and Robotic Systems: Theory and Applications},
  volume = {57},
  number = {1-4},
  year = {2010},
  note = {cited By 45},
  pages = {233-257},
  abstract = {In this paper a vision-based approach for guidance and safe landing of an Unmanned Aerial Vehicle (UAV) is proposed. The UAV is required to navigate from an initial to a final position in a partially known environment. The guidance system allows a remote user to define target areas from a high-resolution aerial or satellite image to determine either the waypoints of the navigation trajectory or the landing area. A feature-based image-matching algorithm finds the natural landmarks and gives feedback to an onboard, hierarchical, behaviour-based control system for autonomous navigation and landing. Two algorithms for safe landing area detection are also proposed, based on feature optical flow analysis. The main novelty is in the vision-based architecture, extensively tested on a helicopter, which, in particular, does not require any artificial landmark (e.g., helipad). Results show the appropriateness of the vision-based approach, which is robust to occlusions and light variations. {\textcopyright} 2009 Springer Science+Business Media B.V.},
  doi = {10.1007/s10846-009-9373-3},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84893415343\&partnerID=40\&md5=1af91a05c14c688e97131ab24f43e2bd},
  author = {Cesetti, A. and Frontoni, Emanuele and Mancini, Adriano and Zingaretti, Primo and Longhi, Sauro}
}

@article{Mancini2009307,
  title = {A framework for simulation and testing of UAVs in cooperative scenarios},
  journal = {Journal of Intelligent and Robotic Systems: Theory and Applications},
  volume = {54},
  number = {1-3 SPEC. ISS.},
  year = {2009},
  note = {cited By 18},
  pages = {307-329},
  abstract = {Today, Unmanned Aerial Vehicles (UAVs) have deeply modified the concepts of surveillance, Search \& Rescue, aerial photogrammetry, mapping, etc. The kinds of missions grow continuously; in most cases missions are performed by a fleet of cooperating autonomous and heterogeneous vehicles. These systems are very complex, and it becomes fundamental to simulate every mission stage in order to exploit the benefits of simulation, such as repeatability, modularity and low cost. In this paper a framework for simulation and testing of UAVs in cooperative scenarios is presented. The framework, based on modularity and stratification into different specialized layers, allows easy switching from simulated to real environments, thus reducing testing and debugging times, especially in a training context. Results obtained using the proposed framework on some test cases are also reported. {\textcopyright} 2008 Springer Science+Business Media B.V.},
  doi = {10.1007/s10846-008-9268-8},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-56649092912\&partnerID=40\&md5=dc161c1b0a0533432e58fcdf588aec40},
  author = {Mancini, Adriano and Cesetti, A. and Iual{\`e}, A. and Frontoni, Emanuele and Zingaretti, Primo and Longhi, Sauro}
}
@conference{Cesetti2009,
  title = {A single-camera feature-based vision system for helicopter autonomous landing},
  booktitle = {2009 International Conference on Advanced Robotics, ICAR 2009},
  year = {2009},
  note = {cited By 4},
  abstract = {In this paper a feature-based, single-camera vision system for the safe landing of an Unmanned Aerial Vehicle (UAV) is proposed. The autonomous helicopter used for tests is required to navigate from an initial to a final position in a partially known environment, to locate a landing area and to land on it. The algorithm proposed for the detection of safe landing areas is based on the analysis of optical flow and of the mutual geometric positions of different kinds of features, observed from different points of view. Vision allows estimating the position and velocity of a set of features with respect to the helicopter, while the onboard, hierarchical, behavior-based control system autonomously guides the helicopter. Results, obtained using real data and a real helicopter in an outdoor scenario, show the appropriateness of the vision-based approach. It does not require any artificial landmark (e.g., helipad), is able to correctly and autonomously estimate safe landing areas, and is quite robust to occlusions.},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-70449334565\&partnerID=40\&md5=3373a2496d17d58330e7a887f552c6ba},
  author = {Cesetti, A. and Frontoni, Emanuele and Mancini, Adriano and Zingaretti, Primo and Longhi, Sauro}
}

@conference{Cesetti2009910,
  title = {Vision-based autonomous navigation and landing of an unmanned aerial vehicle using natural landmarks},
  booktitle = {2009 17th Mediterranean Conference on Control and Automation, MED 2009},
  year = {2009},
  note = {cited By 13},
  pages = {910-915},
  abstract = {This paper presents the design and implementation of a vision-based navigation and landing algorithm for an autonomous helicopter. The vision system allows target areas to be defined from a high-resolution aerial or satellite image in order to determine the waypoints of the navigation trajectory or the landing area. The helicopter is required to navigate from an initial position to a final position in a partially known environment using GPS and vision, to locate a landing target (a helipad of known shape or a natural landmark) and to land on it. The vision system, using a feature-based image matching algorithm, finds the area and gives feedback to the control system for autonomous landing. Vision is used for accurate target detection, recognition and tracking. The helicopter updates its landing target parameters by means of vision and uses an on-board, behavior-based controller to follow a path to the landing site. Results show the appropriateness of the vision-based approach, which does not require any artificial landmark (e.g., helipad) and is quite robust to occlusions, light variations and seasonal changes (e.g., brown or green leaves). {\textcopyright} 2009 IEEE.},
  doi = {10.1109/MED.2009.5164661},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84885338997\&partnerID=40\&md5=e544d46c421bd62be9c0ad9fa4ac016b},
  author = {Cesetti, A. and Frontoni, Emanuele and Mancini, Adriano and Zingaretti, Primo and Longhi, Sauro}
}
@article{Cesetti200817,
  title = {From simulated to real scenarios: A framework for multi-UAVs},
  journal = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  volume = {5325 LNAI},
  year = {2008},
  note = {cited By 2},
  pages = {17-28},
  abstract = {In this paper a framework for the simulation of Unmanned Aerial Vehicles (UAVs), oriented to rotary-wing aerial vehicles, is presented. It allows UAV simulations with stand-alone agents or with multiple agents exchanging data in cooperative scenarios. The framework, based on modularity and stratification into different specialized layers, allows easy switching from simulated to real environments, thus reducing testing and debugging times. CAD modelling supports the framework mainly with respect to the extraction of geometrical parameters and to virtualization. Useful applications of the framework include pilot training, testing and validation of UAV control strategies, especially in an educational context, and simulation of complex missions. {\textcopyright} 2008 Springer Berlin Heidelberg.},
  doi = {10.1007/978-3-540-89076-8-6},
  url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-58049115678\&partnerID=40\&md5=8301ffc2a6a97aefca4df012079ad726},
  author = {Cesetti, A. and Mancini, Adriano and Frontoni, Emanuele and Zingaretti, Primo and Longhi, Sauro}
}