@article{07af208802c44ec3a5b503114779b20f,
title = "Multi-Temporal LiDAR and Hyperspectral Data Fusion for Classification of Semi-Arid Woody Cover Species",
abstract = "Mapping the spatial distribution of woody vegetation is important for monitoring, managing, and studying woody encroachment in grasslands. However, in semi-arid regions, remotely sensed discrimination of tree species is difficult, primarily because of spectral similarities among species and small, sparse canopy cover, but also because of overlapping woody canopies and differing seasonal leaf retention (deciduous versus evergreen) characteristics. Similar studies in different biomes have achieved low accuracies using coarse spatial resolution image data. The objective of this study was to investigate the use of multi-temporal, airborne hyperspectral imagery and light detection and ranging (LiDAR) derived data for tree species classification in a semi-arid desert region. This study produces highly accurate classifications by combining multi-temporal, fine spatial resolution (~1 m) hyperspectral and LiDAR data through a reproducible scripting and machine learning approach that can be applied to larger areas and similar datasets. Combining multi-temporal vegetation indices and canopy height models led to an overall accuracy of 95.28% and kappa of 94.17%. Five woody species were discriminated, resulting in producer accuracies ranging from 86.12% to 98.38%. The influence of fusing spectral and structural information in a random forest classifier for tree identification is evident. Additionally, a multi-temporal dataset slightly increases classification accuracies over a single data collection. Our results show a promising methodology for tree species classification in a semi-arid region using multi-temporal hyperspectral and LiDAR remote sensing data.",
keywords = "LiDAR, hyperspectral, semi-arid, species classification",
author = "Norton, {Cynthia L.} and Kyle Hartfield and {Holifield Collins}, {Chandra D.} and {van Leeuwen}, {Willem J.D.} and Metz, {Loretta J.}",
note = "Funding Information: This study utilized datasets captured by both manned and unmanned aircraft systems (MAS and UAS). Hyperspectral and LiDAR data were collected by the National Ecological Observatory Network (NEON), while high spatial resolution digital color images were collected using various UASs. NEON is funded by the National Science Foundation (NSF) to collect 30 years of long-term open access ecological data for understanding, predicting, and observing U.S. ecosystem change. NEON airborne remote sensing surveys are conducted during peak greenness (90% maximum foliage) seasons at various sites on a national level, using the Airborne Observation Platform (AOP) installed on a light MAS. The NEON AOP collected three years of hyperspectral and LiDAR data at the Santa Rita Experimental Range (SRER) during 2017 (collection dates: 24–30 August), 2018 (collection dates: 24–28 August), and 2019 (collection dates: 1–13 September). Flight surveys were conducted at a low altitude (1000 m AGL) and utilized a minimum 10 km$^2$ flight box design (survey boundaries) to produce consistent nominal 1 m spatial resolution datasets. Data products were published categorically by levels 1, 2, and 3 (L1, L2, L3) based on the complexity of processing steps (https://www.neonscience.org/data-collection/airborne-remote-sensing, accessed on 3 December 2021). Level 1 point cloud and level 3 hyperspectral data products were used in this study. Each year had some data gaps and bad pixels due to cloud cover; these were excluded from the analysis. Funding Information: This material is based upon work supported by the U.S. Department of Agriculture, Natural Resources Conservation Service, Conservation Effects Assessment Project-Grazing Lands component, under agreement number NRC21IRA0010783. Publisher Copyright: {\textcopyright} 2022 by the authors. Licensee MDPI, Basel, Switzerland.",
year = "2022",
month = jun,
day = "1",
doi = "10.3390/rs14122896",
language = "English (US)",
volume = "14",
journal = "Remote Sensing",
issn = "2072-4292",
publisher = "Multidisciplinary Digital Publishing Institute (MDPI)",
number = "12",
}