Lenotary 3 years ago
commit
a2e9cb769f

+ 17 - 0
.github/stale.yml

@@ -0,0 +1,17 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 7
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 1
+# Issues with these labels will never be considered stale
+exemptLabels:
+  - pinned
+  - security
+# Label to use when marking an issue as stale
+staleLabel: stale
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+  This issue has been automatically marked as stale because it has not had
+  recent activity. It will be closed if no further activity occurs. Thank you
+  for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false

+ 160 - 0
CMakeLists.txt

@@ -0,0 +1,160 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(lio_sam)
+
+set(CMAKE_BUILD_TYPE "Release")
+set(CMAKE_CXX_FLAGS "-std=c++14")
+set(CMAKE_CXX_FLAGS_RELEASE "-O3 -Wall -g -pthread")
+
+find_package(catkin REQUIRED COMPONENTS
+  tf
+  roscpp
+  rospy
+  cv_bridge
+  # pcl library
+  pcl_conversions
+  # msgs
+  std_msgs
+  sensor_msgs
+  geometry_msgs
+  nav_msgs
+  message_generation
+)
+
+find_package(OpenMP REQUIRED)
+find_package(PCL REQUIRED QUIET)
+find_package(OpenCV REQUIRED QUIET)
+find_package(GTSAM REQUIRED QUIET)
+find_package(RapidJSON)
+
+add_message_files(
+  DIRECTORY msg
+  FILES
+  cloud_info.msg
+)
+
+generate_messages(
+  DEPENDENCIES
+  geometry_msgs
+  std_msgs
+  nav_msgs
+  sensor_msgs
+)
+
+catkin_package(
+  INCLUDE_DIRS include
+  DEPENDS PCL GTSAM
+
+  CATKIN_DEPENDS 
+  std_msgs
+  nav_msgs
+  geometry_msgs
+  sensor_msgs
+  message_runtime 
+  message_generation
+)
+
+# include directories
+include_directories(
+  include
+  ${catkin_INCLUDE_DIRS}
+  ${PCL_INCLUDE_DIRS}
+  ${OpenCV_INCLUDE_DIRS}
+  ${GTSAM_INCLUDE_DIR}
+)
+
+# link directories
+link_directories(
+  include
+  ${PCL_LIBRARY_DIRS}
+  ${OpenCV_LIBRARY_DIRS}
+  ${GTSAM_LIBRARY_DIRS}
+)
+
+###########
+## Build ##
+###########
+
+# Range Image Projection
+add_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp)
+add_dependencies(${PROJECT_NAME}_imageProjection ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
+target_link_libraries(${PROJECT_NAME}_imageProjection ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})
+
+# Feature Association
+add_executable(${PROJECT_NAME}_featureExtraction src/featureExtraction.cpp)
+add_dependencies(${PROJECT_NAME}_featureExtraction ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
+target_link_libraries(${PROJECT_NAME}_featureExtraction ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})
+
+# Mapping Optimization
+add_executable(${PROJECT_NAME}_mapOptmization src/mapOptmization.cpp)
+add_dependencies(${PROJECT_NAME}_mapOptmization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
+target_compile_options(${PROJECT_NAME}_mapOptmization PRIVATE ${OpenMP_CXX_FLAGS})
+target_link_libraries(${PROJECT_NAME}_mapOptmization ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)
+
+# Localization
+add_executable(${PROJECT_NAME}_localization src/localization.cpp)
+add_dependencies(${PROJECT_NAME}_localization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
+target_compile_options(${PROJECT_NAME}_localization PRIVATE ${OpenMP_CXX_FLAGS})
+target_link_libraries(${PROJECT_NAME}_localization ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)
+
+
+# IMU Preintegration
+add_executable(${PROJECT_NAME}_imuPreintegration src/imuPreintegration.cpp)
+target_link_libraries(${PROJECT_NAME}_imuPreintegration ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam)
+
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+#   scripts/my_python_script
+#   DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+install(TARGETS
+  ${PROJECT_NAME}_imageProjection
+  ${PROJECT_NAME}_featureExtraction
+  ${PROJECT_NAME}_mapOptmization
+  ${PROJECT_NAME}_localization
+  ${PROJECT_NAME}_imuPreintegration
+  RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
+#
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+#   ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+# Mark cpp header files for installation
+install(DIRECTORY include/${PROJECT_NAME}/
+  DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+  FILES_MATCHING PATTERN "*.h"
+  PATTERN ".svn" EXCLUDE
+)
+
+# Mark launch files for installation
+install(DIRECTORY launch
+  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+)
+
+# Mark config files for installation
+install(DIRECTORY config
+  DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+)
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+#   # myfile1
+#   # myfile2
+#   DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )

+ 29 - 0
LICENSE

@@ -0,0 +1,29 @@
+BSD 3-Clause License
+ 
+Copyright (c) 2020, Tixiao Shan
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 226 - 0
README.md

@@ -0,0 +1,226 @@
+# LIO-SAM
+
+**A real-time lidar-inertial odometry package. We strongly recommend that users read this document thoroughly and test the package with the provided datasets first. A demonstration video of the method can be found on [YouTube](https://www.youtube.com/watch?v=A0H8CoORZJU).**
+
+<p align='center'>
+    <img src="./config/doc/demo.gif" alt="drawing" width="800"/>
+</p>
+
+<p align='center'>
+    <img src="./config/doc/device-hand-2.png" alt="drawing" width="200"/>
+    <img src="./config/doc/device-hand.png" alt="drawing" width="200"/>
+    <img src="./config/doc/device-jackal.png" alt="drawing" width="200"/>
+    <img src="./config/doc/device-boat.png" alt="drawing" width="200"/>
+</p>
+
+## Menu
+
+  - [**System architecture**](#system-architecture)
+
+  - [**Package dependency**](#dependency)
+
+  - [**Package install**](#install)
+
+  - [**Prepare lidar data**](#prepare-lidar-data) (must read)
+
+  - [**Prepare IMU data**](#prepare-imu-data) (must read)
+
+  - [**Sample datasets**](#sample-datasets)
+
+  - [**Run the package**](#run-the-package)
+
+  - [**Other notes**](#other-notes)
+
+  - [**Issues**](#issues)
+
+  - [**Paper**](#paper)
+
+  - [**TODO**](#todo)
+
+  - [**Acknowledgement**](#acknowledgement)
+
+## System architecture
+
+<p align='center'>
+    <img src="./config/doc/system.png" alt="drawing" width="800"/>
+</p>
+
+We design a system that maintains two factor graphs and runs up to 10x faster than real-time (a minimal GTSAM sketch of the pattern is shown below the list).
+  - The factor graph in "mapOptimization.cpp" optimizes the lidar odometry factors and GPS factors. This factor graph is maintained consistently throughout the whole test.
+  - The factor graph in "imuPreintegration.cpp" optimizes the IMU and lidar odometry factors and estimates the IMU bias. This factor graph is reset periodically, which guarantees real-time odometry estimation at the IMU frequency.
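+
+For readers unfamiliar with GTSAM, the minimal sketch below (illustrative only, not the project's actual code; keys and noise sigmas are arbitrary placeholders) shows the pattern both graphs follow: key poses are variables, relative measurements become between factors, and iSAM2 re-optimizes incrementally as new factors arrive.
+
+```
+#include <gtsam/geometry/Pose3.h>
+#include <gtsam/nonlinear/ISAM2.h>
+#include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/Values.h>
+#include <gtsam/slam/BetweenFactor.h>
+#include <gtsam/slam/PriorFactor.h>
+
+int main()
+{
+    gtsam::ISAM2 isam;
+    gtsam::NonlinearFactorGraph graph;
+    gtsam::Values initial;
+
+    // anchor the first key pose with a prior factor (sigmas are placeholder values)
+    auto priorNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-1, 1e-1, 1e-1).finished());
+    graph.add(gtsam::PriorFactor<gtsam::Pose3>(0, gtsam::Pose3(), priorNoise));
+    initial.insert(0, gtsam::Pose3());
+
+    // a made-up relative motion from lidar odometry becomes a between factor on consecutive key poses
+    gtsam::Pose3 delta(gtsam::Rot3(), gtsam::Point3(1.0, 0.0, 0.0));
+    auto odomNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-1, 1e-1, 1e-1, 1e-2, 1e-2, 1e-2).finished());
+    graph.add(gtsam::BetweenFactor<gtsam::Pose3>(0, 1, delta, odomNoise));
+    initial.insert(1, delta);
+
+    // incremental optimization; new factors are folded in with each update() call
+    isam.update(graph, initial);
+    gtsam::Values estimate = isam.calculateEstimate();
+    return 0;
+}
+```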
+
+## Dependency
+
+- [ROS](http://wiki.ros.org/ROS/Installation) (tested with Kinetic and Melodic)
+  ```
+  sudo apt-get install -y ros-kinetic-navigation
+  sudo apt-get install -y ros-kinetic-robot-localization
+  sudo apt-get install -y ros-kinetic-robot-state-publisher
+  ```
+- [gtsam](https://github.com/borglab/gtsam/releases) (Georgia Tech Smoothing and Mapping library)
+  ```
+  wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.2.zip
+  cd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/
+  cd ~/Downloads/gtsam-4.0.2/
+  mkdir build && cd build
+  cmake -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF ..
+  sudo make install -j8
+  ```
+
+## Install
+
+Use the following commands to download and compile the package.
+
+```
+cd ~/catkin_ws/src
+git clone https://github.com/TixiaoShan/LIO-SAM.git
+cd ..
+catkin_make
+```
+
+## Prepare lidar data
+
+The user needs to prepare the point cloud data in the correct format for cloud deskewing, which is mainly done in "imageProjection.cpp". The two requirements are:
+  - **Provide point time stamp**. LIO-SAM uses IMU data to perform point cloud deskew. Thus, the relative point time within a scan needs to be known. The up-to-date Velodyne ROS driver should output this information directly. Here, we assume the point time channel is called "time." The definition of the point type is located at the top of "imageProjection.cpp" (a sketch is shown after this list). The "deskewPoint()" function uses this relative time to obtain the transformation of the point relative to the beginning of the scan. When the lidar rotates at 10Hz, the timestamp of a point should vary between 0 and 0.1 seconds. If you are using another lidar sensor, you may need to change the name of this time channel and make sure that it is the relative time within a scan.
+  - **Provide point ring number**. LIO-SAM uses this information to organize points correctly in a matrix. The ring number indicates which channel of the sensor the point belongs to. The definition of the point type is located at the top of "imageProjection.cpp." The up-to-date Velodyne ROS driver should output this information directly. Again, if you are using another lidar sensor, you may need to rename this field. Note that only mechanical lidars are currently supported by the package.
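+
+For reference, the point type expected for Velodyne data looks roughly like the sketch below; check the actual definition at the top of "imageProjection.cpp" and make sure the "ring" and "time" field names match what your driver publishes.
+
+```
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <cstdint>
+
+// a PCL point carrying the per-point ring index and relative time stamp
+struct VelodynePointXYZIRT
+{
+    PCL_ADD_POINT4D                 // x, y, z (+ padding)
+    PCL_ADD_INTENSITY;              // intensity
+    uint16_t ring;                  // laser channel the point belongs to
+    float time;                     // seconds relative to the start of the scan
+    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+} EIGEN_ALIGN16;
+
+POINT_CLOUD_REGISTER_POINT_STRUCT(VelodynePointXYZIRT,
+    (float, x, x)(float, y, y)(float, z, z)(float, intensity, intensity)
+    (uint16_t, ring, ring)(float, time, time)
+)
+```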
+
+## Prepare IMU data
+
+  - **IMU requirement**. Like the original LOAM implementation, LIO-SAM only works with a 9-axis IMU, which provides roll, pitch, and yaw estimates. The roll and pitch estimates are mainly used to initialize the system at the correct attitude. The yaw estimate initializes the system at the right heading when GPS data is used. Theoretically, an initialization procedure like the one in VINS-Mono would enable LIO-SAM to work with a 6-axis IMU. The performance of the system largely depends on the quality of the IMU measurements: the higher the IMU data rate, the better the system accuracy. We use a Microstrain 3DM-GX5-25, which outputs data at 500Hz. We recommend using an IMU that provides at least a 200Hz output rate. Note that the internal IMU of the Ouster lidar is a 6-axis IMU.
+
+  - **IMU alignment**. LIO-SAM transforms the raw IMU data from the IMU frame to the lidar frame, which follows the ROS REP-105 convention (x - forward, y - left, z - upward). To make the system function properly, the correct extrinsic transformation needs to be provided in the "params.yaml" file. **The reason there are two extrinsics is that the acceleration and attitude of my IMU (Microstrain 3DM-GX5-25) use different coordinates. Depending on your IMU manufacturer, the two extrinsics for your IMU may or may not be the same**. Using our setup as an example (see the Eigen sketch at the end of this section):
+    - We need to negate the x- and z-axis acceleration and gyro readings to transform the IMU data into the lidar frame, which is indicated by "extrinsicRot" in "params.yaml."
+    - The transformation of the attitude readings is slightly different. We rotate the attitude measurements by -90 degrees around the lidar z-axis and obtain the corresponding roll, pitch, and yaw readings in the lidar frame. This transformation is indicated by "extrinsicRPY" in "params.yaml."
+
+  - **IMU debug**. It's strongly recommended that the user uncomment the debug lines in "imuHandler()" of "imageProjection.cpp" and test the output of the transformed IMU data. The user can rotate the sensor suite to check whether the readings correspond to the sensor's movement. A YouTube video that shows the corrected IMU data can be found [here (link to YouTube)](https://youtu.be/BOUK8LYQhHs).
+
+
+<p align='center'>
+    <img src="./config/doc/imu-transform.png" alt="drawing" width="800"/>
+</p>
+<p align='center'>
+    <img src="./config/doc/imu-debug.gif" alt="drawing" width="800"/>
+</p>
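+
+The snippet below (illustrative only, not project code) writes out the two extrinsics from the Microstrain example above as Eigen matrices and shows where each one is applied; the same numbers go into "extrinsicRot" and "extrinsicRPY" in "params.yaml".
+
+```
+#include <Eigen/Dense>
+#include <iostream>
+
+int main()
+{
+    // extrinsicRot: negate the x and z axes of the acceleration and gyro readings
+    Eigen::Matrix3d extRot;
+    extRot << -1, 0, 0,
+               0, 1, 0,
+               0, 0, -1;
+
+    // extrinsicRPY: rotate the attitude readings by -90 degrees about the lidar z-axis
+    Eigen::Matrix3d extRPY;
+    extRPY <<  0, 1, 0,
+              -1, 0, 0,
+               0, 0, 1;
+
+    Eigen::Vector3d accImu(0.1, 0.2, -9.8);                          // raw IMU acceleration (made-up numbers)
+    Eigen::Vector3d accLidar = extRot * accImu;                      // acceleration expressed in the lidar frame
+
+    Eigen::Quaterniond qImu = Eigen::Quaterniond::Identity();        // raw IMU attitude
+    Eigen::Quaterniond qLidar = qImu * Eigen::Quaterniond(extRPY);   // attitude expressed in the lidar frame
+
+    std::cout << accLidar.transpose() << "\n" << qLidar.coeffs().transpose() << std::endl;
+    return 0;
+}
+```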
+
+## Sample datasets
+
+  * Download some sample datasets to test the functionality of the package. The datasets below are configured to run with the default settings:
+    - **Walking dataset:** [[Google Drive](https://drive.google.com/file/d/1HN5fYPXEHbDq0E5JtbQPkCHIHUoTFFnN/view?usp=sharing)][[Dropbox](https://www.dropbox.com/s/jzql78speb1jbcc/casual_walk.bag.zip?dl=0)]
+    - **Park dataset:** [[Google Drive](https://drive.google.com/file/d/19PZieaJaVkXDs2ZromaHTxYoq0zkiHae/view?usp=sharing)][[Dropbox](https://www.dropbox.com/s/fnuxpdc4f9yyb55/park.bag.zip?dl=0)]
+    - **Garden dataset:** [[Google Drive](https://drive.google.com/file/d/1q6yuVhyJbkUBhut9yhfox2WdV4VZ9BZX/view?usp=sharing)]
+
+  * The datasets below need the parameters to be configured. In these datasets, the point cloud topic is "points_raw" and the IMU topic is "imu_correct," which provides the IMU data in the ROS REP-105 standard. Because no IMU transformation is needed for these datasets, the following configurations need to be changed to run them successfully:
+    - The "imuTopic" parameter in "config/params.yaml" needs to be set to "imu_correct".
+    - The "extrinsicRot" and "extrinsicRPY" in "config/params.yaml" need to be set to identity matrices.
+      - **Rotation dataset:** [[Google Drive](https://drive.google.com/file/d/1V4ijY4PgLdjKmdzcQ18Xu7VdcHo2UaWI/view?usp=sharing)][[Dropbox](https://www.dropbox.com/s/xdwzjnqwr7x19cl/rotation.bag.zip?dl=0)]
+      - **Campus dataset (large):** [[Google Drive](https://drive.google.com/file/d/1q4Sf7s2veVc7bs08Qeha3stOiwsytopL/view?usp=sharing)][[Dropbox](https://www.dropbox.com/s/etgnfg39ha79b7a/big_loop.bag.zip?dl=0)]
+      - **Campus dataset (small):** [[Google Drive](https://drive.google.com/file/d/1_V-cFMTQ4RO-_16mU9YPUE8ozsPeddCv/view?usp=sharing)][[Dropbox](https://www.dropbox.com/s/33u6epkfu36i1fd/west.bag.zip?dl=0)]
+      
+  * Ouster (OS1-128) dataset. No extrinsics need to be changed for this dataset if you are using the default settings. Please follow the Ouster notes below to configure the package to run with Ouster data. A video of the dataset can be found on [YouTube](https://youtu.be/O7fKgZQzkEo):
+    - **Rooftop dataset:** [[Google Drive](https://drive.google.com/file/d/1Qy2rZdPudFhDbATPpblioBb8fRtjDGQj/view?usp=sharing)]
+
+  * KITTI dataset. The extrinsics can be found in the Notes KITTI section below. To generate more bags using other KITTI raw data, you can use the python script provided in "config/doc/kitti2bag".
+    - **2011_09_30_drive_0028:** [[Google Drive](https://drive.google.com/file/d/12h3ooRAZVTjoMrf3uv1_KriEXm33kHc7/view?usp=sharing)]
+
+## Run the package
+
+1. Run the launch file:
+```
+roslaunch lio_sam run.launch
+```
+
+2. Play existing bag files:
+```
+rosbag play your-bag.bag -r 3
+```
+
+## Other notes
+
+  - **Loop closure:** The loop closure function here is a proof of concept, directly adapted from the ICP-based loop closure in LeGO-LOAM. For a more advanced implementation, please refer to [ScanContext](https://github.com/irapkaist/SC-LeGO-LOAM). Set "loopClosureEnableFlag" in "params.yaml" to "true" to test the loop closure function. In Rviz, uncheck "Map (cloud)" and check "Map (global)": the visualized map - "Map (cloud)" - is simply a stack of point clouds in Rviz, and their position will not be updated after pose correction. Because ICP runs rather slowly, it is suggested to set the playback speed to "-r 1". You can try the Garden dataset for testing; a sketch of the ICP matching step follows the images below.
+
+<p align='center'>
+    <img src="./config/doc/loop-closure.gif" alt="drawing" width="350"/>
+    <img src="./config/doc/loop-closure-2.gif" alt="drawing" width="350"/>
+</p>
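+
+As a rough illustration of the ICP step (a hypothetical helper, not the project's exact code), a candidate key frame is aligned against a submap of nearby history key frames, and the match is accepted only if the fitness score stays below "historyKeyframeFitnessScore":
+
+```
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <pcl/registration/icp.h>
+
+using PointType = pcl::PointXYZI;
+
+// returns true and the correcting transform if ICP converges with a good enough fitness score
+bool matchLoopClosure(const pcl::PointCloud<PointType>::Ptr& currentKeyFrame,
+                      const pcl::PointCloud<PointType>::Ptr& historySubmap,
+                      Eigen::Matrix4f& correction,
+                      float fitnessThreshold = 0.3f)   // cf. historyKeyframeFitnessScore
+{
+    pcl::IterativeClosestPoint<PointType, PointType> icp;
+    icp.setMaxCorrespondenceDistance(15.0);
+    icp.setMaximumIterations(100);
+    icp.setInputSource(currentKeyFrame);
+    icp.setInputTarget(historySubmap);
+
+    pcl::PointCloud<PointType> aligned;
+    icp.align(aligned);
+
+    if (!icp.hasConverged() || icp.getFitnessScore() > fitnessThreshold)
+        return false;                               // reject the loop closure candidate
+
+    correction = icp.getFinalTransformation();      // pose correction that becomes a loop factor
+    return true;
+}
+```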
+
+  - **Using GPS:** The park dataset is provided for testing LIO-SAM with GPS data. This dataset was gathered by [Yewei Huang](https://robustfieldautonomylab.github.io/people.html). To enable the GPS function, change "gpsTopic" in "params.yaml" to "odometry/gps". In Rviz, uncheck "Map (cloud)" and check "Map (global)". Also check "Odom GPS", which visualizes the GPS odometry. "gpsCovThreshold" can be adjusted to filter out bad GPS readings. "poseCovThreshold" can be used to adjust how often GPS factors are added to the graph; for example, you will notice the trajectory is constantly corrected by GPS when you set "poseCovThreshold" to 1.0. Because of the heavy iSAM optimization, it's recommended to set the playback speed to "-r 1". A sketch of this gating logic follows the image below.
+
+<p align='center'>
+    <img src="./config/doc/gps-demo.gif" alt="drawing" width="400"/>
+</p>
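+
+Conceptually, the two thresholds gate the GPS factors roughly like this (hypothetical sketch; only the parameter names and their example values are taken from "params.yaml"):
+
+```
+#include <nav_msgs/Odometry.h>
+
+// add a GPS factor only when the fix is trustworthy and the pose estimate has become uncertain
+bool shouldAddGpsFactor(const nav_msgs::Odometry& gps,
+                        double poseCovarianceXY,   // current pose covariance from the optimizer
+                        double gpsCovThreshold,    // e.g. 2.0 m^2
+                        double poseCovThreshold)   // e.g. 25.0 m^2
+{
+    const double gpsCovX = gps.pose.covariance[0];
+    const double gpsCovY = gps.pose.covariance[7];
+
+    if (gpsCovX > gpsCovThreshold || gpsCovY > gpsCovThreshold)
+        return false;   // GPS reading too noisy, skip it
+    if (poseCovarianceXY < poseCovThreshold)
+        return false;   // odometry is still confident, no correction needed
+    return true;
+}
+```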
+
+  - **KITTI:** Since LIO-SAM needs a high-frequency IMU to function properly, we use the KITTI raw data for testing. One problem that remains unsolved is that the intrinsics of the IMU are unknown, which has a big impact on the accuracy of LIO-SAM. Download the provided sample data and make the following changes in "params.yaml":
+    - extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01] 
+    - extrinsicRot: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]
+    - extrinsicRPY: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]
+    - N_SCAN: 64
+    - downsampleRate: 2 or 4
+    - loopClosureEnableFlag: true or false
+
+<p align='center'>
+    <img src="./config/doc/kitti-map.png" alt="drawing" width="300"/>
+    <img src="./config/doc/kitti-demo.gif" alt="drawing" width="300"/>
+</p>
+
+  - **Ouster lidar:** To make LIO-SAM work with an Ouster lidar, some preparation needs to be done at the hardware and software levels.
+    - Hardware:
+      - Use an external IMU. LIO-SAM does not work with the internal 6-axis IMU of the Ouster lidar. You need to attach a 9-axis IMU to the lidar and perform the data gathering.
+      - Configure the driver. Change "timestamp_mode" in your Ouster launch file to "TIME_FROM_PTP_1588" so that the point clouds carry ROS-format timestamps.
+    - Software:
+      - Change "timeField" in "params.yaml" to "t". "t" is the point timestamp in a scan for Ouster lidar.
+      - Change "N_SCAN" and "Horizon_SCAN" in "params.yaml" according to your lidar, i.e., N_SCAN=128, Horizon_SCAN=1024.
+      - Comment out the point definition for Velodyne at the top of "imageProjection.cpp".
+      - Uncomment the point definition for Ouster at the top of "imageProjection.cpp" (a sketch is given after this list).
+      - Comment line "timeScanEnd = timeScanCur + laserCloudIn->points.back().time" in "imageProjection.cpp".
+      - Uncomment line "timeScanEnd = timeScanCur + (float)laserCloudIn->points.back().t / 1000000000.0" in "imageProjection.cpp".
+      - Comment line "deskewPoint(&thisPoint, laserCloudIn->points[i].time)" in "imageProjection.cpp".
+      - Uncomment line "deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0)" in "imageProjection.cpp".
+      - Run "catkin_make" to re-compile the package.
+    - Gen 1 and Gen 2 Ouster:
+      It seems that the point coordinate definition may differ between Ouster generations. Please refer to [Issue #94](https://github.com/TixiaoShan/LIO-SAM/issues/94) for debugging.
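+
+For reference, the Ouster point definition at the top of "imageProjection.cpp" looks roughly like the sketch below (the exact fields depend on your driver version, so check the file itself); note the nanosecond "t" field instead of Velodyne's float "time".
+
+```
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <cstdint>
+
+// a PCL point matching typical Ouster driver output; "t" is nanoseconds since the start of the scan
+struct OusterPointXYZIRT
+{
+    PCL_ADD_POINT4D;
+    float intensity;
+    uint32_t t;
+    uint16_t reflectivity;
+    uint8_t ring;
+    uint16_t noise;
+    uint32_t range;
+    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+} EIGEN_ALIGN16;
+
+POINT_CLOUD_REGISTER_POINT_STRUCT(OusterPointXYZIRT,
+    (float, x, x)(float, y, y)(float, z, z)(float, intensity, intensity)
+    (uint32_t, t, t)(uint16_t, reflectivity, reflectivity)
+    (uint8_t, ring, ring)(uint16_t, noise, noise)(uint32_t, range, range)
+)
+```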
+
+<p align='center'>
+    <img src="./config/doc/ouster-device.jpg" alt="drawing" width="300"/>
+    <img src="./config/doc/ouster-demo.gif" alt="drawing" width="300"/>
+</p>
+
+## Issues
+
+  - **Zigzag or jerking behavior**: if your lidar and IMU data formats are consistent with the requirements of LIO-SAM, this problem is likely caused by unsynchronized timestamps between the lidar and IMU data.
+
+  - **Jumping up and down**: if you start testing your bag file and the base_link starts to jump up and down immediately, your IMU extrinsics are likely wrong; for example, the gravity acceleration has a negative value.
+
+  - **mapOptimization crash**: this is usually caused by GTSAM. Please install the GTSAM version specified in this README. More similar issues can be found [here](https://github.com/TixiaoShan/LIO-SAM/issues).
+
+## Paper 
+
+Thank you for citing [LIO-SAM (IROS-2020)](./config/doc/paper.pdf) if you use any of this code. 
+```
+@inproceedings{liosam2020shan,
+  title={LIO-SAM: Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping},
+  author={Shan, Tixiao and Englot, Brendan and Meyers, Drew and Wang, Wei and Ratti, Carlo and Rus, Daniela},
+  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
+  year={2020},
+  organization={IEEE}
+}
+```
+
+Part of the code is adapted from [LeGO-LOAM](https://github.com/RobustFieldAutonomyLab/LeGO-LOAM).
+```
+@inproceedings{legoloam2018shan,
+  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},
+  author={Shan, Tixiao and Englot, Brendan},
+  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
+  pages={4758-4765},
+  year={2018},
+  organization={IEEE}
+}
+```
+
+## TODO
+
+  - [ ] [Bug within imuPreintegration](https://github.com/TixiaoShan/LIO-SAM/issues/104)
+
+## Acknowledgement
+
+  - LIO-SAM is based on LOAM (J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time).

+ 183 - 0
config/params.yaml

@@ -0,0 +1,183 @@
+lio_sam:
+
+  useRing: false
+  useOdom: true
+  # Topics
+  pointCloudTopic: "rslidar_points"               # Point cloud data
+  imuTopic: "imu_data"                         # IMU data
+  odomTopic: "odom"                   # IMU preintegration odometry, same frequency as IMU
+  gpsTopic: "odometry/gpsz"                   # GPS odometry topic from navsat, see module_navsat.launch file
+
+  # Frames
+  lidarFrame: "base_link"
+  baselinkFrame: "base_link"
+  odometryFrame: "odom"
+  mapFrame: "map"
+
+  # GPS Settings
+  useImuHeadingInitialization: true           # if using GPS data, set to "true"
+  useGpsElevation: false                      # if GPS elevation is bad, set to "false"
+  gpsCovThreshold: 2.0                        # m^2, threshold for using GPS data
+  poseCovThreshold: 25.0                      # m^2, threshold for using GPS data
+
+  # Export settings
+  savePCD: true                              # https://github.com/TixiaoShan/LIO-SAM/issues/3
+  saveJson: true                              # https://github.com/TixiaoShan/LIO-SAM/issues/3
+  savePCDDirectory: "/work/file/map/"        # in your home folder, starts and ends with "/". Warning: the code deletes the "LOAM" folder and then recreates it. See "mapOptimization" for implementation
+  mapPath: "/work/file/map/"
+
+  # Sensor Settings
+  N_SCAN: 16                                  # number of lidar channel (i.e., 16, 32, 64, 128)
+  Horizon_SCAN: 1800                          # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048)
+  timeField: "time"                           # point timestamp field, Velodyne - "time", Ouster - "t"
+  downsampleRate: 1                           # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1
+  lidarMinRange: 1.0                         # default: 1.0, minimum lidar range to be used
+  lidarMaxRange: 200.0                       # default: 1000.0, maximum lidar range to be used
+
+  # IMU Settings
+  imuAccNoise: 3.9939570888238808e-03
+  imuGyrNoise: 1.5636343949698187e-03
+  imuAccBiasN: 6.4356659353532566e-05
+  imuGyrBiasN: 3.5640318696367613e-05
+  imuGravity: 9.81
+#  imuGravity: 0
+  imuRPYWeight: 0.01
+  odomYaw: 0
+  extrinsicTrans: [ 0.0,0,0]
+#  extrinsicTrans: [ -8.086759e-01, 3.195559e-01, -7.997231e-01 ]
+#  extrinsicRPY: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
+#  extrinsicRot: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
+
+
+
+
+
+
+
+  # Extrinsics (lidar -> IMU)
+#  extrinsicTrans: [0.0, 0.0, 0.0]
+#  extrinsicRot: [-1, 0, 0,
+#                  0, 1, 0,
+#                  0, 0, -1]
+#  extrinsicRot: [ 0,  1, 0,
+#                  -1, 0, 0,
+#                  0, 0, 1 ]
+#  extrinsicRPY: [0,  1, 0,
+#                 -1, 0, 0,
+#                  0, 0, 1]
+  extrinsicRot: [1, 0, 0,
+                   0, 1, 0,
+                   0, 0, 1]
+  extrinsicRPY: [1, 0, 0,
+                   0, 1, 0,
+                   0, 0, 1]
+
+  # LOAM feature threshold
+  edgeThreshold: 1.0
+  surfThreshold: 0.1
+  edgeFeatureMinValidNum: 10
+  surfFeatureMinValidNum: 100
+
+  # voxel filter params
+  odometrySurfLeafSize: 0.2                     # default: 0.4 - outdoor, 0.2 - indoor
+  mappingCornerLeafSize: 0.1                    # default: 0.2 - outdoor, 0.1 - indoor
+  mappingSurfLeafSize: 0.2                      # default: 0.4 - outdoor, 0.2 - indoor
+
+  # robot motion constraint (in case you are using a 2D robot)
+  z_tollerance: 0                            # meters
+  rotation_tollerance: 0                     # radians
+
+  # CPU Params
+  numberOfCores: 8                              # number of cores for mapping optimization
+  mappingProcessInterval: 0.15                  # seconds, regulate mapping frequency
+
+  # Surrounding map
+  surroundingkeyframeAddingDistThreshold: 1.0   # meters, regulate keyframe adding threshold
+  surroundingkeyframeAddingAngleThreshold: 0.2  # radians, regulate keyframe adding threshold
+  surroundingKeyframeDensity: 1.0               # meters, downsample surrounding keyframe poses
+  surroundingKeyframeSearchRadius: 50.0         # meters, within n meters scan-to-map optimization (when loop closure disabled)
+
+  # Loop closure
+  loopClosureEnableFlag: true
+  loopClosureFrequency: 2.0                     # Hz, regulate loop closure constraint add frequency
+  surroundingKeyframeSize: 50                   # submap size (when loop closure enabled)
+  historyKeyframeSearchRadius: 2.0             # meters, key frames within n meters of the current pose will be considered for loop closure
+  historyKeyframeSearchTimeDiff: 30.0           # seconds, key frames at least n seconds older will be considered for loop closure
+  historyKeyframeSearchNum: 25                  # number of history key frames that will be fused into a submap for loop closure
+  historyKeyframeFitnessScore: 0.3              # icp threshold, the smaller the better alignment
+
+  # Visualization
+  globalMapVisualizationSearchRadius: 1000.0    # meters, global map visualization radius
+  globalMapVisualizationPoseDensity: 1.0       # meters, global map visualization keyframe density
+  globalMapVisualizationLeafSize: 0.2           # meters, global map visualization cloud density
+
+  #Localization
+  addKeyFameNum: 0
+  ndtMinScore: 1.0
+
+
+
+# Navsat (convert GPS coordinates to Cartesian)
+navsat:
+  frequency: 50
+  wait_for_datum: false
+  delay: 0.0
+  magnetic_declination_radians: 0
+  yaw_offset: 0
+  zero_altitude: true
+  broadcast_utm_transform: false
+  broadcast_utm_transform_as_parent_frame: false
+  publish_filtered_gps: false
+
+# EKF for Navsat
+ekf_gps:
+  publish_tf: false
+  map_frame: map
+  odom_frame: odom
+  base_link_frame: base_link
+  world_frame: odom
+
+  frequency: 50
+  two_d_mode: false
+  sensor_timeout: 0.01
+  # -------------------------------------
+  # External IMU:
+  # -------------------------------------
+  imu0: imu_correct
+  # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. EKF can also transform the data using tf between your imu and base_link
+  imu0_config: [ false, false, false,
+                 true,  true,  true,
+                 false, false, false,
+                 false, false, true,
+                 true,  true,  true ]
+  imu0_differential: false
+  imu0_queue_size: 50
+  imu0_remove_gravitational_acceleration: true
+  # -------------------------------------
+  # Odometry (From Navsat):
+  # -------------------------------------
+  odom0: odometry/gps
+  odom0_config: [ true,  true,  true,
+                  false, false, false,
+                  false, false, false,
+                  false, false, false,
+                  false, false, false ]
+  odom0_differential: false
+  odom0_queue_size: 10
+
+  #                            x     y     z     roll  pitch yaw   vx    vy    vz    vroll vpitch vyaw  ax    ay    az
+  process_noise_covariance: [ 1.0,  0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    1.0,  0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    10.0, 0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0.03, 0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0.03, 0,    0,     0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0.1,  0,     0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0.25,  0,     0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0.25,  0,     0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0.04,  0,    0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0,     0.01, 0,    0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0.01, 0,    0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0.5,  0,    0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0.01, 0,      0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0.01,   0,
+                              0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0.015 ]

+ 12 - 0
config/test.json

@@ -0,0 +1,12 @@
+{
+  "name": "map",
+  "keyFrameSize": 126,
+  "keyFramePose3D": [
+    [0,0,2,0],
+    [2,2,2,0],
+  ],
+  "keyFramePose6D": [
+
+  ]
+
+}

+ 341 - 0
include/utility.h

@@ -0,0 +1,341 @@
+#pragma once
+#ifndef _UTILITY_LIDAR_ODOMETRY_H_
+#define _UTILITY_LIDAR_ODOMETRY_H_
+
+#include <ros/ros.h>
+
+#include <std_msgs/Header.h>
+#include <std_msgs/Float64MultiArray.h>
+#include <sensor_msgs/Imu.h>
+#include <sensor_msgs/PointCloud2.h>
+#include <sensor_msgs/NavSatFix.h>
+#include <nav_msgs/Odometry.h>
+#include <nav_msgs/Path.h>
+#include <visualization_msgs/Marker.h>
+#include <visualization_msgs/MarkerArray.h>
+
+#include <opencv2/opencv.hpp>
+#include <Eigen/Dense>
+
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <pcl/search/impl/search.hpp>
+#include <pcl/range_image/range_image.h>
+#include <pcl/kdtree/kdtree_flann.h>
+#include <pcl/common/common.h>
+#include <pcl/common/transforms.h>
+#include <pcl/registration/icp.h>
+#include <pcl/io/pcd_io.h>
+#include <pcl/filters/filter.h>
+#include <pcl/filters/voxel_grid.h>
+#include <pcl/filters/crop_box.h> 
+#include <pcl_conversions/pcl_conversions.h>
+
+#include <tf/LinearMath/Quaternion.h>
+#include <tf/transform_listener.h>
+#include <tf/transform_datatypes.h>
+#include <tf/transform_broadcaster.h>
+
+#include <vector>
+#include <cmath>
+#include <algorithm>
+#include <queue>
+#include <deque>
+#include <iostream>
+#include <fstream>
+#include <ctime>
+#include <cfloat>
+#include <iterator>
+#include <sstream>
+#include <string>
+#include <limits>
+#include <iomanip>
+#include <array>
+#include <thread>
+#include <mutex>
+
+using namespace std;
+
+typedef pcl::PointXYZI PointType;
+
+class ParamServer
+{
+public:
+
+    ros::NodeHandle nh;
+
+    std::string robot_id;
+
+    //Topics
+    string pointCloudTopic;
+    string imuTopic;
+    string odomTopic;
+    string gpsTopic;
+
+    //Frames
+    string lidarFrame;
+    string baselinkFrame;
+    string odometryFrame;
+    string mapFrame;
+    string mapPath;
+    bool useRing;
+    bool useOdom;
+    // GPS Settings
+    bool useImuHeadingInitialization;
+    bool useGpsElevation;
+    float gpsCovThreshold;
+    float poseCovThreshold;
+
+    // Save pcd
+    bool savePCD;
+    bool saveJson;
+    string savePCDDirectory;
+
+    // Lidar Sensor Configuration (Velodyne by default)
+    int N_SCAN;
+    int Horizon_SCAN;
+    string timeField;
+    int downsampleRate;
+    float lidarMinRange;
+    float lidarMaxRange;
+
+    // IMU
+    float imuAccNoise;
+    float imuGyrNoise;
+    float imuAccBiasN;
+    float imuGyrBiasN;
+    float imuGravity;
+    float imuRPYWeight;
+
+    float odomYaw;
+
+    vector<double> extRotV;
+    vector<double> extRPYV;
+    vector<double> extTransV;
+    Eigen::Matrix3d extRot;
+    Eigen::Matrix3d extRPY;
+    Eigen::Vector3d extTrans;
+    Eigen::Quaterniond extQRPY;
+    Eigen::AngleAxisd odomAngle;
+
+    // LOAM
+    float edgeThreshold;
+    float surfThreshold;
+    int edgeFeatureMinValidNum;
+    int surfFeatureMinValidNum;
+
+    // voxel filter params
+    float odometrySurfLeafSize;
+    float mappingCornerLeafSize;
+    float mappingSurfLeafSize ;
+
+    float z_tollerance; 
+    float rotation_tollerance;
+
+    // CPU Params
+    int numberOfCores;
+    double mappingProcessInterval;
+
+    // Surrounding map
+    float surroundingkeyframeAddingDistThreshold; 
+    float surroundingkeyframeAddingAngleThreshold; 
+    float surroundingKeyframeDensity;
+    float surroundingKeyframeSearchRadius;
+    
+    // Loop closure
+    bool  loopClosureEnableFlag;
+    float loopClosureFrequency;
+    int   surroundingKeyframeSize;
+    float historyKeyframeSearchRadius;
+    float historyKeyframeSearchTimeDiff;
+    int   historyKeyframeSearchNum;
+    float historyKeyframeFitnessScore;
+
+    // global map visualization radius
+    float globalMapVisualizationSearchRadius;
+    float globalMapVisualizationPoseDensity;
+    float globalMapVisualizationLeafSize;
+    //Localization
+    int addKeyFameNum{0};
+    float ndtMinScore{0.05};
+    ParamServer()
+    {
+        nh.param<std::string>("/robot_id", robot_id, "roboat");
+
+        nh.param<std::string>("lio_sam/pointCloudTopic", pointCloudTopic, "points_raw");
+        nh.param<std::string>("lio_sam/imuTopic", imuTopic, "imu_correct");
+        nh.param<std::string>("lio_sam/odomTopic", odomTopic, "odometry/imu");
+        nh.param<std::string>("lio_sam/gpsTopic", gpsTopic, "odometry/gps");
+
+        nh.param<std::string>("lio_sam/lidarFrame", lidarFrame, "base_link");
+        nh.param<std::string>("lio_sam/baselinkFrame", baselinkFrame, "base_link");
+        nh.param<std::string>("lio_sam/odometryFrame", odometryFrame, "odom");
+        nh.param<std::string>("lio_sam/mapFrame", mapFrame, "map");
+        nh.param<std::string>("lio_sam/mapPath", mapPath, "");
+
+        nh.param<bool>("lio_sam/useImuHeadingInitialization", useImuHeadingInitialization, false);
+        nh.param<bool>("lio_sam/useRing", useRing, false);
+        nh.param<bool>("lio_sam/useOdom", useOdom, false);
+        nh.param<bool>("lio_sam/useGpsElevation", useGpsElevation, false);
+        nh.param<float>("lio_sam/gpsCovThreshold", gpsCovThreshold, 2.0);
+        nh.param<float>("lio_sam/poseCovThreshold", poseCovThreshold, 25.0);
+
+        nh.param<bool>("lio_sam/savePCD", savePCD, false);
+        nh.param<bool>("lio_sam/saveJson", saveJson, false);
+        nh.param<std::string>("lio_sam/savePCDDirectory", savePCDDirectory, "/Downloads/LOAM/");
+
+        nh.param<int>("lio_sam/N_SCAN", N_SCAN, 16);
+        nh.param<int>("lio_sam/Horizon_SCAN", Horizon_SCAN, 1800);
+        nh.param<std::string>("lio_sam/timeField", timeField, "time");
+        nh.param<int>("lio_sam/downsampleRate", downsampleRate, 1);
+        nh.param<float>("lio_sam/lidarMinRange", lidarMinRange, 1.0);
+        nh.param<float>("lio_sam/lidarMaxRange", lidarMaxRange, 1000.0);
+
+        nh.param<float>("lio_sam/imuAccNoise", imuAccNoise, 0.01);
+        nh.param<float>("lio_sam/imuGyrNoise", imuGyrNoise, 0.001);
+        nh.param<float>("lio_sam/imuAccBiasN", imuAccBiasN, 0.0002);
+        nh.param<float>("lio_sam/imuGyrBiasN", imuGyrBiasN, 0.00003);
+        nh.param<float>("lio_sam/imuGravity", imuGravity, 9.80511);
+        nh.param<float>("lio_sam/imuRPYWeight", imuRPYWeight, 0.01);
+        nh.param<float>("lio_sam/odomYaw", odomYaw, 0.0);
+        nh.param<vector<double>>("lio_sam/extrinsicRot", extRotV, vector<double>());
+        nh.param<vector<double>>("lio_sam/extrinsicRPY", extRPYV, vector<double>());
+        nh.param<vector<double>>("lio_sam/extrinsicTrans", extTransV, vector<double>());
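+        // interpret the flat YAML lists as a row-major 3x3 rotation, a 3x3 RPY rotation, and a 3x1 translation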
+        extRot = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extRotV.data(), 3, 3);
+        extRPY = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extRPYV.data(), 3, 3);
+        extTrans = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extTransV.data(), 3, 1);
+        extQRPY = Eigen::Quaterniond(extRPY);
+        odomAngle = Eigen::AngleAxisd (odomYaw, Eigen::Vector3d(0,0,1));
+        nh.param<float>("lio_sam/edgeThreshold", edgeThreshold, 0.1);
+        nh.param<float>("lio_sam/surfThreshold", surfThreshold, 0.1);
+        nh.param<int>("lio_sam/edgeFeatureMinValidNum", edgeFeatureMinValidNum, 10);
+        nh.param<int>("lio_sam/surfFeatureMinValidNum", surfFeatureMinValidNum, 100);
+
+        nh.param<float>("lio_sam/odometrySurfLeafSize", odometrySurfLeafSize, 0.2);
+        nh.param<float>("lio_sam/mappingCornerLeafSize", mappingCornerLeafSize, 0.2);
+        nh.param<float>("lio_sam/mappingSurfLeafSize", mappingSurfLeafSize, 0.2);
+
+        nh.param<float>("lio_sam/z_tollerance", z_tollerance, FLT_MAX);
+        nh.param<float>("lio_sam/rotation_tollerance", rotation_tollerance, FLT_MAX);
+
+        nh.param<int>("lio_sam/numberOfCores", numberOfCores, 2);
+        nh.param<double>("lio_sam/mappingProcessInterval", mappingProcessInterval, 0.15);
+
+        nh.param<float>("lio_sam/surroundingkeyframeAddingDistThreshold", surroundingkeyframeAddingDistThreshold, 1.0);
+        nh.param<float>("lio_sam/surroundingkeyframeAddingAngleThreshold", surroundingkeyframeAddingAngleThreshold, 0.2);
+        nh.param<float>("lio_sam/surroundingKeyframeDensity", surroundingKeyframeDensity, 1.0);
+        nh.param<float>("lio_sam/surroundingKeyframeSearchRadius", surroundingKeyframeSearchRadius, 50.0);
+
+        nh.param<bool>("lio_sam/loopClosureEnableFlag", loopClosureEnableFlag, false);
+        nh.param<float>("lio_sam/loopClosureFrequency", loopClosureFrequency, 1.0);
+        nh.param<int>("lio_sam/surroundingKeyframeSize", surroundingKeyframeSize, 50);
+        nh.param<float>("lio_sam/historyKeyframeSearchRadius", historyKeyframeSearchRadius, 10.0);
+        nh.param<float>("lio_sam/historyKeyframeSearchTimeDiff", historyKeyframeSearchTimeDiff, 30.0);
+        nh.param<int>("lio_sam/historyKeyframeSearchNum", historyKeyframeSearchNum, 25);
+        nh.param<float>("lio_sam/historyKeyframeFitnessScore", historyKeyframeFitnessScore, 0.3);
+
+        nh.param<float>("lio_sam/globalMapVisualizationSearchRadius", globalMapVisualizationSearchRadius, 1e3);
+        nh.param<float>("lio_sam/globalMapVisualizationPoseDensity", globalMapVisualizationPoseDensity, 10.0);
+        nh.param<float>("lio_sam/globalMapVisualizationLeafSize", globalMapVisualizationLeafSize, 1.0);
+        nh.param<int>("lio_sam/addKeyFameNum", addKeyFameNum, 0);
+        nh.param<float>("lio_sam/ndtMinScore", ndtMinScore, 0.05);
+
+        usleep(100);
+    }
+
+    sensor_msgs::Imu imuConverter(const sensor_msgs::Imu& imu_in)
+    {
+        sensor_msgs::Imu imu_out = imu_in;
+        // rotate acceleration
+        Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z);
+        acc = extRot * acc;
+        imu_out.linear_acceleration.x = acc.x();
+        imu_out.linear_acceleration.y = acc.y();
+        imu_out.linear_acceleration.z = acc.z();
+        // rotate gyroscope
+        Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z);
+        gyr = extRot * gyr;
+        imu_out.angular_velocity.x = gyr.x();
+        imu_out.angular_velocity.y = gyr.y();
+        imu_out.angular_velocity.z = gyr.z();
+        // rotate roll pitch yaw
+        Eigen::Quaterniond q_from(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z);
+        Eigen::Quaterniond q_final = q_from * extQRPY;
+        imu_out.orientation.x = q_final.x();
+        imu_out.orientation.y = q_final.y();
+        imu_out.orientation.z = q_final.z();
+        imu_out.orientation.w = q_final.w();
+
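+        // a near-zero quaternion norm means the IMU supplied no valid orientation (e.g. a 6-axis IMU)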
+        if (sqrt(q_final.x()*q_final.x() + q_final.y()*q_final.y() + q_final.z()*q_final.z() + q_final.w()*q_final.w()) < 0.1)
+        {
+            ROS_ERROR("Invalid quaternion, please use a 9-axis IMU!");
+            ros::shutdown();
+        }
+
+        return imu_out;
+    }
+};
+
+
+sensor_msgs::PointCloud2 publishCloud(ros::Publisher *thisPub, pcl::PointCloud<PointType>::Ptr thisCloud, ros::Time thisStamp, std::string thisFrame)
+{
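+    // convert the PCL cloud to a ROS message, stamp it, and publish only if anyone is subscribed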
+    sensor_msgs::PointCloud2 tempCloud;
+    pcl::toROSMsg(*thisCloud, tempCloud);
+    tempCloud.header.stamp = thisStamp;
+    tempCloud.header.frame_id = thisFrame;
+    if (thisPub->getNumSubscribers() != 0)
+        thisPub->publish(tempCloud);
+    return tempCloud;
+}
+
+template<typename T>
+double ROS_TIME(T msg)
+{
+    return msg->header.stamp.toSec();
+}
+
+
+template<typename T>
+void imuAngular2rosAngular(sensor_msgs::Imu *thisImuMsg, T *angular_x, T *angular_y, T *angular_z)
+{
+    *angular_x = thisImuMsg->angular_velocity.x;
+    *angular_y = thisImuMsg->angular_velocity.y;
+    *angular_z = thisImuMsg->angular_velocity.z;
+}
+
+
+template<typename T>
+void imuAccel2rosAccel(sensor_msgs::Imu *thisImuMsg, T *acc_x, T *acc_y, T *acc_z)
+{
+    *acc_x = thisImuMsg->linear_acceleration.x;
+    *acc_y = thisImuMsg->linear_acceleration.y;
+    *acc_z = thisImuMsg->linear_acceleration.z;
+}
+
+
+template<typename T>
+void imuRPY2rosRPY(sensor_msgs::Imu *thisImuMsg, T *rosRoll, T *rosPitch, T *rosYaw)
+{
+    double imuRoll, imuPitch, imuYaw;
+    tf::Quaternion orientation;
+    tf::quaternionMsgToTF(thisImuMsg->orientation, orientation);
+    tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);
+
+    *rosRoll = imuRoll;
+    *rosPitch = imuPitch;
+    *rosYaw = imuYaw;
+}
+
+
+float pointDistance(PointType p)
+{
+    return sqrt(p.x*p.x + p.y*p.y + p.z*p.z);
+}
+
+
+float pointDistance(PointType p1, PointType p2)
+{
+    return sqrt((p1.x-p2.x)*(p1.x-p2.x) + (p1.y-p2.y)*(p1.y-p2.y) + (p1.z-p2.z)*(p1.z-p2.z));
+}
+
+#endif

+ 616 - 0
launch/include/config/localization.rviz

@@ -0,0 +1,616 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 191
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /links1
+        - /Odometry1
+        - /Point Cloud1
+        - /Feature Cloud1
+        - /Mapping1
+        - /Mapping1/Map (cloud)1
+        - /Mapping1/Map (global)1
+        - /Grid1
+        - /PointCloud21
+        - /PointCloud22
+      Splitter Ratio: 0.5600000023841858
+    Tree Height: 672
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+    Name: Tool Properties
+    Splitter Ratio: 0.5
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Class: rviz/Group
+      Displays:
+        - Class: rviz/TF
+          Enabled: false
+          Frame Timeout: 999
+          Frames:
+            All Enabled: false
+          Marker Scale: 3
+          Name: TF
+          Show Arrows: true
+          Show Axes: true
+          Show Names: true
+          Tree:
+            {}
+          Update Interval: 0
+          Value: false
+        - Class: rviz/Axes
+          Enabled: true
+          Length: 2
+          Name: map
+          Radius: 0.5
+          Reference Frame: map
+          Value: true
+        - Class: rviz/Axes
+          Enabled: true
+          Length: 1
+          Name: base_link
+          Radius: 0.30000001192092896
+          Reference Frame: base_link
+          Value: true
+      Enabled: true
+      Name: links
+    - Class: rviz/Group
+      Displays:
+        - Angle Tolerance: 0.10000000149011612
+          Class: rviz/Odometry
+          Covariance:
+            Orientation:
+              Alpha: 0.5
+              Color: 255; 255; 127
+              Color Style: Unique
+              Frame: Local
+              Offset: 1
+              Scale: 1
+              Value: true
+            Position:
+              Alpha: 0.30000001192092896
+              Color: 204; 51; 204
+              Scale: 1
+              Value: true
+            Value: false
+          Enabled: true
+          Keep: 300
+          Name: Odom IMU
+          Position Tolerance: 0.10000000149011612
+          Shape:
+            Alpha: 1
+            Axes Length: 0.5
+            Axes Radius: 0.10000000149011612
+            Color: 255; 25; 0
+            Head Length: 0.30000001192092896
+            Head Radius: 0.10000000149011612
+            Shaft Length: 1
+            Shaft Radius: 0.05000000074505806
+            Value: Axes
+          Topic: /odometry/imu
+          Unreliable: false
+          Value: true
+        - Angle Tolerance: 0.10000000149011612
+          Class: rviz/Odometry
+          Covariance:
+            Orientation:
+              Alpha: 0.5
+              Color: 255; 255; 127
+              Color Style: Unique
+              Frame: Local
+              Offset: 1
+              Scale: 1
+              Value: true
+            Position:
+              Alpha: 0.30000001192092896
+              Color: 204; 51; 204
+              Scale: 1
+              Value: true
+            Value: false
+          Enabled: true
+          Keep: 300
+          Name: Odom GPS
+          Position Tolerance: 0.10000000149011612
+          Shape:
+            Alpha: 1
+            Axes Length: 1
+            Axes Radius: 0.30000001192092896
+            Color: 255; 25; 0
+            Head Length: 0.30000001192092896
+            Head Radius: 0.10000000149011612
+            Shaft Length: 1
+            Shaft Radius: 0.05000000074505806
+            Value: Axes
+          Topic: /odometry/gps
+          Unreliable: false
+          Value: true
+        - Angle Tolerance: 0.10000000149011612
+          Class: rviz/Odometry
+          Covariance:
+            Orientation:
+              Alpha: 0.5
+              Color: 255; 255; 127
+              Color Style: Unique
+              Frame: Local
+              Offset: 1
+              Scale: 1
+              Value: true
+            Position:
+              Alpha: 0.30000001192092896
+              Color: 204; 51; 204
+              Scale: 1
+              Value: true
+            Value: false
+          Enabled: false
+          Keep: 300
+          Name: Odom lidar
+          Position Tolerance: 0.10000000149011612
+          Shape:
+            Alpha: 1
+            Axes Length: 0.25
+            Axes Radius: 0.10000000149011612
+            Color: 255; 25; 0
+            Head Length: 0.30000001192092896
+            Head Radius: 0.10000000149011612
+            Shaft Length: 1
+            Shaft Radius: 0.05000000074505806
+            Value: Axes
+          Topic: /lio_sam/mapping/odometry
+          Unreliable: false
+          Value: false
+      Enabled: false
+      Name: Odometry
+    - Class: rviz/Group
+      Displays:
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 152
+          Min Color: 0; 0; 0
+          Min Intensity: 1
+          Name: Velodyne
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: false
+          Size (Pixels): 2
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/deskew/cloud_deskewed
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 0.7900000214576721
+          Min Color: 0; 0; 0
+          Min Intensity: 0
+          Name: Reg Cloud
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 3
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/mapping/cloud_registered
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+      Enabled: true
+      Name: Point Cloud
+    - Class: rviz/Group
+      Displays:
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 0; 255; 0
+          Color Transformer: FlatColor
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.077729225158691
+          Min Color: 0; 0; 0
+          Min Intensity: 0.019985778257250786
+          Name: Edge Feature
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 5
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/feature/cloud_corner
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 85; 255
+          Color Transformer: FlatColor
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100607872009277
+          Min Color: 0; 0; 0
+          Min Intensity: 0.0007188677554950118
+          Name: Plannar Feature
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 3
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/feature/cloud_surface
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+      Enabled: false
+      Name: Feature Cloud
+    - Class: rviz/Group
+      Displays:
+        - Alpha: 0.30000001192092896
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 7.4858574867248535
+            Min Value: -1.2309398651123047
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 300
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 205
+          Min Color: 0; 0; 0
+          Min Intensity: 1
+          Name: Map (cloud)
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: false
+          Size (Pixels): 2
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/deskew/cloud_deskewed
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 36.61034393310547
+            Min Value: -2.3476977348327637
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: AxisColor
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100604057312012
+          Min Color: 0; 0; 0
+          Min Intensity: 0.014545874670147896
+          Name: Map (local)
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 2
+          Size (m): 0.10000000149011612
+          Style: Flat Squares
+          Topic: /lio_sam/mapping/map_local
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+        - Alpha: 0.30000001192092896
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 39.389625549316406
+            Min Value: -10.785320281982422
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: FlatColor
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 173.5
+          Min Color: 0; 0; 0
+          Min Intensity: 1
+          Name: Map (global)
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 1
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/mapping/map_global
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+        - Alpha: 1
+          Buffer Length: 1
+          Class: rviz/Path
+          Color: 85; 255; 255
+          Enabled: false
+          Head Diameter: 0.30000001192092896
+          Head Length: 0.20000000298023224
+          Length: 0.30000001192092896
+          Line Style: Billboards
+          Line Width: 0.10000000149011612
+          Name: Path (global)
+          Offset:
+            X: 0
+            Y: 0
+            Z: 0
+          Pose Color: 255; 85; 255
+          Pose Style: None
+          Radius: 0.029999999329447746
+          Shaft Diameter: 0.10000000149011612
+          Shaft Length: 0.10000000149011612
+          Topic: /lio_sam/mapping/path
+          Unreliable: false
+          Value: false
+        - Alpha: 1
+          Buffer Length: 1
+          Class: rviz/Path
+          Color: 255; 85; 255
+          Enabled: false
+          Head Diameter: 0.30000001192092896
+          Head Length: 0.20000000298023224
+          Length: 0.30000001192092896
+          Line Style: Billboards
+          Line Width: 0.10000000149011612
+          Name: Path (local)
+          Offset:
+            X: 0
+            Y: 0
+            Z: 0
+          Pose Color: 255; 85; 255
+          Pose Style: None
+          Radius: 0.029999999329447746
+          Shaft Diameter: 0.10000000149011612
+          Shaft Length: 0.10000000149011612
+          Topic: /lio_sam/imu/path
+          Unreliable: false
+          Value: false
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 20.802837371826172
+            Min Value: -0.03934507071971893
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: AxisColor
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100604057312012
+          Min Color: 0; 0; 0
+          Min Intensity: 0.014545874670147896
+          Name: Loop closure
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 2
+          Size (m): 0.30000001192092896
+          Style: Flat Squares
+          Topic: /lio_sam/mapping/icp_loop_closure_corrected_cloud
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+        - Class: rviz/MarkerArray
+          Enabled: true
+          Marker Topic: /lio_sam/mapping/loop_closure_constraints
+          Name: Loop constraint
+          Namespaces:
+            {}
+          Queue Size: 100
+          Value: true
+      Enabled: true
+      Name: Mapping
+    - Alpha: 0.5
+      Cell Size: 5
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 100
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Alpha: 1
+      Autocompute Intensity Bounds: true
+      Autocompute Value Bounds:
+        Max Value: 0.6872708201408386
+        Min Value: 0
+        Value: true
+      Axis: Z
+      Channel Name: intensity
+      Class: rviz/PointCloud2
+      Color: 255; 255; 255
+      Color Transformer: FlatColor
+      Decay Time: 0
+      Enabled: true
+      Invert Rainbow: false
+      Max Color: 255; 255; 255
+      Max Intensity: 0
+      Min Color: 0; 0; 0
+      Min Intensity: 0
+      Name: PointCloud2
+      Position Transformer: XYZ
+      Queue Size: 10
+      Selectable: true
+      Size (Pixels): 3
+      Size (m): 0.009999999776482582
+      Style: Points
+      Topic: /lio_sam/mapping/pathPoint
+      Unreliable: false
+      Use Fixed Frame: true
+      Use rainbow: true
+      Value: true
+    - Alpha: 1
+      Autocompute Intensity Bounds: true
+      Autocompute Value Bounds:
+        Max Value: 10
+        Min Value: -10
+        Value: true
+      Axis: Z
+      Channel Name: intensity
+      Class: rviz/PointCloud2
+      Color: 255; 255; 255
+      Color Transformer: Intensity
+      Decay Time: 0
+      Enabled: true
+      Invert Rainbow: false
+      Max Color: 255; 255; 255
+      Max Intensity: 380
+      Min Color: 0; 0; 0
+      Min Intensity: 0
+      Name: PointCloud2
+      Position Transformer: XYZ
+      Queue Size: 10
+      Selectable: true
+      Size (Pixels): 3
+      Size (m): 0.009999999776482582
+      Style: Points
+      Topic: /lio_sam/mapping/trajectoryPath
+      Unreliable: false
+      Use Fixed Frame: true
+      Use rainbow: true
+      Value: true
+  Enabled: true
+  Global Options:
+    Background Color: 0; 0; 0
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 85.90950012207031
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Focal Point:
+        X: 15.41309642791748
+        Y: -3.277204990386963
+        Z: -2.8151767253875732
+      Focal Shape Fixed Size: false
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 1.5697963237762451
+      Target Frame: base_link
+      Value: Orbit (rviz)
+      Yaw: 4.779407978057861
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 1008
+  Hide Left Dock: false
+  Hide Right Dock: true
+  QMainWindow State: 000000ff00000000fd0000000400000000000001a30000039afc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c006100790073010000003b0000039a000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000005c00ffffff000000010000011100000435fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e00000435000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d00650100000000000004500000000000000000000005d70000039a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 1920
+  X: 0
+  Y: 0

+ 35 - 0
launch/include/config/robot.urdf.xacro

@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<robot name="lio" xmlns:xacro="http://tixiaoshan.github.io/">
+  <xacro:property name="PI" value="3.1415926535897931" />
+
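+  <!-- All sensor frames (imu_link, rslidar, navsat_link) are attached to
+       chassis_link with identity transforms; base_link coincides with chassis_link. -->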
+  <link name="chassis_link"></link>
+
+  <link name="base_link"></link>
+  <joint name="base_link_joint" type="fixed">
+    <parent link="base_link"/>
+    <child link="chassis_link" />
+    <origin xyz="0 0 0" rpy="0 0 0" />
+  </joint>
+
+  <link name="imu_link"> </link>
+  <joint name="imu_joint" type="fixed">
+    <parent link="chassis_link" />
+    <child link="imu_link" />
+    <origin xyz="0 0 0" rpy="0 0 0" />
+  </joint>
+
+  <link name="rslidar"> </link>
+  <joint name="rslidar_joint" type="fixed">
+    <parent link="chassis_link" />
+    <child link="rslidar" />
+    <origin xyz="0 0 0" rpy="0 0 0" />
+  </joint>
+
+  <link name="navsat_link"> </link>
+  <joint name="navsat_joint" type="fixed">
+    <parent link="chassis_link" />
+    <child link="navsat_link" />
+    <origin xyz="0 0 0" rpy="0 0 0" />
+  </joint>
+
+</robot>

+ 629 - 0
launch/include/config/rviz.rviz

@@ -0,0 +1,629 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 191
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /Global Options1
+        - /links1/TF1
+        - /links1/TF1/Frames1
+        - /links1/map1
+        - /Odometry1
+        - /Point Cloud1
+        - /Point Cloud1/Velodyne1
+        - /Feature Cloud1
+        - /Mapping1/Map (local)1
+      Splitter Ratio: 0.5600000023841858
+    Tree Height: 250
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Tool Properties
+    Expanded: ~
+    Name: Tool Properties
+    Splitter Ratio: 0.5
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Class: rviz/Group
+      Displays:
+        - Class: rviz/TF
+          Enabled: true
+          Frame Timeout: 999
+          Frames:
+            All Enabled: false
+            base_footprint:
+              Value: true
+            base_link:
+              Value: true
+            chassis_link:
+              Value: true
+            imu_link:
+              Value: true
+            map:
+              Value: true
+            navsat_link:
+              Value: true
+            odom:
+              Value: true
+            rslidar:
+              Value: true
+          Marker Scale: 3
+          Name: TF
+          Show Arrows: true
+          Show Axes: true
+          Show Names: true
+          Tree:
+            map:
+              odom:
+                base_footprint:
+                  {}
+                base_link:
+                  chassis_link:
+                    imu_link:
+                      {}
+                    navsat_link:
+                      {}
+                    rslidar:
+                      {}
+          Update Interval: 0
+          Value: true
+        - Class: rviz/Axes
+          Enabled: true
+          Length: 0.10000000149011612
+          Name: map
+          Radius: 0.05000000074505806
+          Reference Frame: map
+          Value: true
+        - Class: rviz/Axes
+          Enabled: false
+          Length: 1
+          Name: base_link
+          Radius: 0.30000001192092896
+          Reference Frame: base_link
+          Value: false
+      Enabled: true
+      Name: links
+    - Class: rviz/Group
+      Displays:
+        - Angle Tolerance: 0.10000000149011612
+          Class: rviz/Odometry
+          Covariance:
+            Orientation:
+              Alpha: 0.5
+              Color: 255; 255; 127
+              Color Style: Unique
+              Frame: Local
+              Offset: 1
+              Scale: 1
+              Value: true
+            Position:
+              Alpha: 0.30000001192092896
+              Color: 204; 51; 204
+              Scale: 1
+              Value: true
+            Value: false
+          Enabled: true
+          Keep: 300
+          Name: Odom IMU
+          Position Tolerance: 0.10000000149011612
+          Shape:
+            Alpha: 1
+            Axes Length: 0.5
+            Axes Radius: 0.10000000149011612
+            Color: 255; 25; 0
+            Head Length: 0.30000001192092896
+            Head Radius: 0.10000000149011612
+            Shaft Length: 1
+            Shaft Radius: 0.05000000074505806
+            Value: Axes
+          Topic: /odometry/imu
+          Unreliable: false
+          Value: true
+        - Angle Tolerance: 0.10000000149011612
+          Class: rviz/Odometry
+          Covariance:
+            Orientation:
+              Alpha: 0.5
+              Color: 255; 255; 127
+              Color Style: Unique
+              Frame: Local
+              Offset: 1
+              Scale: 1
+              Value: true
+            Position:
+              Alpha: 0.30000001192092896
+              Color: 204; 51; 204
+              Scale: 1
+              Value: true
+            Value: false
+          Enabled: true
+          Keep: 300
+          Name: Odom GPS
+          Position Tolerance: 0.10000000149011612
+          Shape:
+            Alpha: 1
+            Axes Length: 1
+            Axes Radius: 0.30000001192092896
+            Color: 255; 25; 0
+            Head Length: 0.30000001192092896
+            Head Radius: 0.10000000149011612
+            Shaft Length: 1
+            Shaft Radius: 0.05000000074505806
+            Value: Axes
+          Topic: /odometry/gps
+          Unreliable: false
+          Value: true
+        - Angle Tolerance: 0.10000000149011612
+          Class: rviz/Odometry
+          Covariance:
+            Orientation:
+              Alpha: 0.5
+              Color: 255; 255; 127
+              Color Style: Unique
+              Frame: Local
+              Offset: 1
+              Scale: 1
+              Value: true
+            Position:
+              Alpha: 0.30000001192092896
+              Color: 204; 51; 204
+              Scale: 1
+              Value: true
+            Value: false
+          Enabled: false
+          Keep: 300
+          Name: Odom lidar
+          Position Tolerance: 0.10000000149011612
+          Shape:
+            Alpha: 1
+            Axes Length: 0.25
+            Axes Radius: 0.10000000149011612
+            Color: 255; 25; 0
+            Head Length: 0.30000001192092896
+            Head Radius: 0.10000000149011612
+            Shaft Length: 1
+            Shaft Radius: 0.05000000074505806
+            Value: Axes
+          Topic: /lio_sam/mapping/odometry
+          Unreliable: false
+          Value: false
+      Enabled: false
+      Name: Odometry
+    - Class: rviz/Group
+      Displays:
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 0
+          Min Color: 0; 0; 0
+          Min Intensity: 0
+          Name: Velodyne
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: false
+          Size (Pixels): 2
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/deskew/cloud_deskewed
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 0.7900000214576721
+          Min Color: 0; 0; 0
+          Min Intensity: 0
+          Name: Reg Cloud
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 3
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/mapping/cloud_registered
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+      Enabled: true
+      Name: Point Cloud
+    - Class: rviz/Group
+      Displays:
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 0; 255; 0
+          Color Transformer: FlatColor
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.077729225158691
+          Min Color: 0; 0; 0
+          Min Intensity: 0.019985778257250786
+          Name: Edge Feature
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 5
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/feature/cloud_corner
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 85; 255
+          Color Transformer: FlatColor
+          Decay Time: 0
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100607872009277
+          Min Color: 0; 0; 0
+          Min Intensity: 0.0007188677554950118
+          Name: Planar Feature
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 3
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/feature/cloud_surface
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+      Enabled: false
+      Name: Feature Cloud
+    - Class: rviz/Group
+      Displays:
+        - Alpha: 0.30000001192092896
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 7.4858574867248535
+            Min Value: -1.2309398651123047
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 300
+          Enabled: true
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 0
+          Min Color: 0; 0; 0
+          Min Intensity: 0
+          Name: Map (cloud)
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: false
+          Size (Pixels): 2
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/mapping/cloud_registered
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: true
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 1.8862897157669067
+            Min Value: -3.8047313690185547
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: AxisColor
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100604057312012
+          Min Color: 0; 0; 0
+          Min Intensity: 0.014545874670147896
+          Name: Map (local)
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 2
+          Size (m): 0.10000000149011612
+          Style: Flat Squares
+          Topic: /lio_sam/mapping/cloud_registered_raw
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 10
+            Min Value: -10
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: Intensity
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100096702575684
+          Min Color: 0; 0; 0
+          Min Intensity: 0.007923072203993797
+          Name: Map (global)
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 2
+          Size (m): 0.009999999776482582
+          Style: Points
+          Topic: /lio_sam/mapping/map_global
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+        - Alpha: 1
+          Buffer Length: 1
+          Class: rviz/Path
+          Color: 85; 255; 255
+          Enabled: true
+          Head Diameter: 0.30000001192092896
+          Head Length: 0.20000000298023224
+          Length: 0.30000001192092896
+          Line Style: Billboards
+          Line Width: 0.10000000149011612
+          Name: Path (global)
+          Offset:
+            X: 0
+            Y: 0
+            Z: 0
+          Pose Color: 255; 85; 255
+          Pose Style: None
+          Radius: 0.029999999329447746
+          Shaft Diameter: 0.10000000149011612
+          Shaft Length: 0.10000000149011612
+          Topic: /lio_sam/mapping/path
+          Unreliable: false
+          Value: true
+        - Alpha: 1
+          Buffer Length: 1
+          Class: rviz/Path
+          Color: 255; 85; 255
+          Enabled: true
+          Head Diameter: 0.30000001192092896
+          Head Length: 0.20000000298023224
+          Length: 0.30000001192092896
+          Line Style: Billboards
+          Line Width: 0.10000000149011612
+          Name: Path (local)
+          Offset:
+            X: 0
+            Y: 0
+            Z: 0
+          Pose Color: 255; 85; 255
+          Pose Style: None
+          Radius: 0.029999999329447746
+          Shaft Diameter: 0.10000000149011612
+          Shaft Length: 0.10000000149011612
+          Topic: /lio_sam/imu/path
+          Unreliable: false
+          Value: true
+        - Alpha: 1
+          Autocompute Intensity Bounds: true
+          Autocompute Value Bounds:
+            Max Value: 20.802837371826172
+            Min Value: -0.03934507071971893
+            Value: true
+          Axis: Z
+          Channel Name: intensity
+          Class: rviz/PointCloud2
+          Color: 255; 255; 255
+          Color Transformer: AxisColor
+          Decay Time: 0
+          Enabled: false
+          Invert Rainbow: false
+          Max Color: 255; 255; 255
+          Max Intensity: 15.100604057312012
+          Min Color: 0; 0; 0
+          Min Intensity: 0.014545874670147896
+          Name: Loop closure
+          Position Transformer: XYZ
+          Queue Size: 10
+          Selectable: true
+          Size (Pixels): 2
+          Size (m): 0.30000001192092896
+          Style: Flat Squares
+          Topic: /lio_sam/mapping/icp_loop_closure_corrected_cloud
+          Unreliable: false
+          Use Fixed Frame: true
+          Use rainbow: true
+          Value: false
+        - Class: rviz/MarkerArray
+          Enabled: true
+          Marker Topic: /lio_sam/mapping/loop_closure_constraints
+          Name: Loop constraint
+          Namespaces:
+            {}
+          Queue Size: 100
+          Value: true
+      Enabled: true
+      Name: Mapping
+    - Alpha: 0.5
+      Cell Size: 5
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 100
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Class: rviz/TF
+      Enabled: false
+      Frame Timeout: 15
+      Frames:
+        All Enabled: true
+      Marker Scale: 1
+      Name: TF
+      Show Arrows: true
+      Show Axes: true
+      Show Names: true
+      Tree:
+        {}
+      Update Interval: 0
+      Value: false
+    - Alpha: 1
+      Autocompute Intensity Bounds: true
+      Autocompute Value Bounds:
+        Max Value: 10
+        Min Value: -10
+        Value: true
+      Axis: Z
+      Channel Name: intensity
+      Class: rviz/PointCloud2
+      Color: 255; 255; 255
+      Color Transformer: FlatColor
+      Decay Time: 0
+      Enabled: true
+      Invert Rainbow: false
+      Max Color: 255; 255; 255
+      Max Intensity: 166
+      Min Color: 0; 0; 0
+      Min Intensity: 1
+      Name: PointCloud2
+      Position Transformer: XYZ
+      Queue Size: 10
+      Selectable: true
+      Size (Pixels): 3
+      Size (m): 0.009999999776482582
+      Style: Flat Squares
+      Topic: /rslidar_points
+      Unreliable: false
+      Use Fixed Frame: true
+      Use rainbow: true
+      Value: true
+    - Alpha: 1
+      Class: rviz_plugin_tutorials/Imu
+      Color: 204; 51; 204
+      Enabled: false
+      History Length: 1
+      Name: Imu
+      Topic: /imu_data
+      Unreliable: false
+      Value: false
+  Enabled: true
+  Global Options:
+    Background Color: 0; 0; 0
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+  Value: true
+  Views:
+    Current:
+      Class: rviz/XYOrbit
+      Distance: 17.998422622680664
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Focal Point:
+        X: 1.7836589813232422
+        Y: -1.2112236022949219
+        Z: 9.9528806458693e-6
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 0.13039879500865936
+      Target Frame: base_link
+      Value: XYOrbit (rviz)
+      Yaw: 2.8935792446136475
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: true
+  Height: 1016
+  Hide Left Dock: true
+  Hide Right Dock: true
+  QMainWindow State: 000000ff00000000fd000000040000000000000346000003bafc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c006100790073000000003d000003ba000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000005c00ffffff0000000100000111000003bafc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d000003ba000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d00650100000000000004500000000000000000000007800000039e00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: true
+  Width: 1920
+  X: 0
+  Y: 0

+ 9 - 0
launch/include/map_rviz.launch

@@ -0,0 +1,9 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+
+    <!--- Run Rviz-->
+        <node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/rviz.rviz" />
+<!--    <node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/localization.rviz" />-->
+
+</launch>

+ 11 - 0
launch/include/module_loam.launch

@@ -0,0 +1,11 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+    
+    <node pkg="$(arg project)" type="$(arg project)_imuPreintegration"   name="$(arg project)_imuPreintegration"    output="screen" 	respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_imageProjection"     name="$(arg project)_imageProjection"      output="screen"     respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_featureExtraction"   name="$(arg project)_featureExtraction"    output="screen"     respawn="true"/>
+<!--    <node pkg="$(arg project)" type="$(arg project)_mapOptmization"      name="$(arg project)_mapOptmization"       output="screen"     respawn="true"/>-->
+    <node pkg="$(arg project)" type="$(arg project)_localization"      name="$(arg project)_localization"       output="screen"     respawn="true"/>
+
+</launch>

+ 20 - 0
launch/include/module_navsat.launch

@@ -0,0 +1,20 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+
+    <env name="ROSCONSOLE_CONFIG_FILE" value="$(find lio_sam)/launch/include/rosconsole/rosconsole_error.conf"/>
+    
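+    <!-- Note: parameters for the nodes below are not set in this file; they are
+         presumably provided by the params.yaml loaded in the top-level launch files. -->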
+    <!-- EKF GPS-->
+    <node pkg="robot_localization" type="ekf_localization_node" name="ekf_gps" respawn="true">
+        <remap from="odometry/filtered" to="odometry/navsat" />
+    </node>
+
+    <!-- Navsat -->
+    <node pkg="robot_localization" type="navsat_transform_node" name="navsat" respawn="true">
+        <!-- <rosparam param="datum">[42.35893211, -71.09345588, 0.0, world, base_link]</rosparam> -->
+        <remap from="imu/data" to="imu_correct" />
+        <remap from="gps/fix" to="gps/fix" />
+        <remap from="odometry/filtered" to="odometry/navsat" />
+    </node>
+
+</launch>

+ 11 - 0
launch/include/module_robot_state_publisher.launch

@@ -0,0 +1,11 @@
+<launch>
+
+	<arg name="project" default="lio_sam"/>
+
+    <param name="robot_description" command="$(find xacro)/xacro $(find lio_sam)/launch/include/config/robot.urdf.xacro --inorder" />
+
+    <node name="robot_state_publisher" pkg="robot_state_publisher" type="robot_state_publisher" respawn="true">
+        <!-- <param name="tf_prefix" value="$(env ROS_HOSTNAME)"/> -->
+    </node>
+  
+</launch>

+ 9 - 0
launch/include/module_rviz.launch

@@ -0,0 +1,9 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+
+    <!--- Run Rviz-->
+<!--    <node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/rviz.rviz" />-->
+    <node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/localization.rviz" />
+
+</launch>

+ 2 - 0
launch/include/rosconsole/rosconsole_error.conf

@@ -0,0 +1,2 @@
+# Set the default ros output to error and higher
+log4j.logger.ros = ERROR

+ 2 - 0
launch/include/rosconsole/rosconsole_info.conf

@@ -0,0 +1,2 @@
+# Set the default ros output to info and higher
+log4j.logger.ros = INFO

+ 2 - 0
launch/include/rosconsole/rosconsole_warn.conf

@@ -0,0 +1,2 @@
+# Set the default ros output to warning and higher
+log4j.logger.ros = WARN

+ 30 - 0
launch/localization.launch

@@ -0,0 +1,30 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+
+    <!-- Parameters -->
+    <rosparam file="$(find lio_sam)/config/params.yaml" command="load"/>
+
+    <!--- LOAM -->
+
+    <node pkg="$(arg project)" type="$(arg project)_imuPreintegration" name="$(arg project)_imuPreintegration"
+          output="screen" respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_imageProjection" name="$(arg project)_imageProjection"
+          output="screen" respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_featureExtraction" name="$(arg project)_featureExtraction"
+          output="screen" respawn="true"/>
+    <!--    <node pkg="$(arg project)" type="$(arg project)_mapOptmization" name="$(arg project)_mapOptmization" output="screen"-->
+    <!--          respawn="true"/>-->
+    <node pkg="$(arg project)" type="$(arg project)_localization" name="$(arg project)_localization" output="screen"
+          respawn="true"/>
+
+    <!--- Robot State TF -->
+    <include file="$(find lio_sam)/launch/include/module_robot_state_publisher.launch"/>
+
+    <!--- Run Navsat -->
+    <include file="$(find lio_sam)/launch/include/module_navsat.launch"/>
+
+    <!--- Run Rviz-->
+    <node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/localization.rviz"/>
+
+</launch>

+ 29 - 0
launch/mapping.launch

@@ -0,0 +1,29 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+    <param name="/use_sim_time" value="false" />
+
+    <!-- Parameters -->
+    <rosparam file="$(find lio_sam)/config/params.yaml" command="load"/>
+
+    <!--- LOAM -->
+    <node pkg="$(arg project)" type="$(arg project)_imuPreintegration" name="$(arg project)_imuPreintegration"
+          output="screen" respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_imageProjection" name="$(arg project)_imageProjection"
+          output="screen" respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_featureExtraction" name="$(arg project)_featureExtraction"
+          output="screen" respawn="true"/>
+    <node pkg="$(arg project)" type="$(arg project)_mapOptmization" name="$(arg project)_mapOptmization" output="screen"
+          respawn="true"/>
+    <!--    <node pkg="$(arg weproject)" type="$(arg project)_localization"      name="$(arg project)_localization"       output="screen"     respawn="true"/>-->
+
+    <!--- Robot State TF -->
+    <include file="$(find lio_sam)/launch/include/module_robot_state_publisher.launch"/>
+
+    <!--- Run Navsat -->
+    <include file="$(find lio_sam)/launch/include/module_navsat.launch"/>
+
+    <!--- Run Rviz-->
+    <node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/rviz.rviz" />
+
+</launch>

+ 20 - 0
launch/run.launch

@@ -0,0 +1,20 @@
+<launch>
+
+    <arg name="project" default="lio_sam"/>
+    
+    <!-- Parameters -->
+    <rosparam file="$(find lio_sam)/config/params.yaml" command="load" />
+
+    <!--- LOAM -->
+    <include file="$(find lio_sam)/launch/include/module_loam.launch" />
+
+    <!--- Robot State TF -->
+    <include file="$(find lio_sam)/launch/include/module_robot_state_publisher.launch" />
+
+    <!--- Run Navsat -->
+    <include file="$(find lio_sam)/launch/include/module_navsat.launch" />
+
+    <!--- Run Rviz-->
+    <include file="$(find lio_sam)/launch/include/module_rviz.launch" />
+
+</launch>

+ 29 - 0
msg/cloud_info.msg

@@ -0,0 +1,29 @@
+# Cloud Info
+Header header 
+
+int32[] startRingIndex
+int32[] endRingIndex
+
+int32[]  pointColInd # point column index in range image
+float32[] pointRange # point range 
+
+int64 imuAvailable
+int64 odomAvailable
+
+# Attitude for LOAM initialization
+float32 imuRollInit
+float32 imuPitchInit
+float32 imuYawInit
+
+# Initial guess from imu pre-integration
+float32 initialGuessX
+float32 initialGuessY
+float32 initialGuessZ
+float32 initialGuessRoll
+float32 initialGuessPitch
+float32 initialGuessYaw
+
+# Point cloud messages
+sensor_msgs/PointCloud2 cloud_deskewed  # original cloud deskewed
+sensor_msgs/PointCloud2 cloud_corner    # extracted corner feature
+sensor_msgs/PointCloud2 cloud_surface   # extracted surface feature

+ 46 - 0
package.xml

@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<package>
+  <name>lio_sam</name>
+  <version>1.0.0</version>
+  <description>Lidar Odometry</description>
+
+  <maintainer email="shant@mit.edu">Tixiao Shan</maintainer>
+
+  <license>TODO</license>
+
+  <author>Tixiao Shan</author>
+
+  <buildtool_depend>catkin</buildtool_depend>
+
+  <build_depend>roscpp</build_depend>
+  <run_depend>roscpp</run_depend>
+  <build_depend>rospy</build_depend>
+  <run_depend>rospy</run_depend>
+  
+  <build_depend>tf</build_depend>
+  <run_depend>tf</run_depend>
+
+  <build_depend>cv_bridge</build_depend>
+  <run_depend>cv_bridge</run_depend>
+
+  <build_depend>pcl_conversions</build_depend>
+  <run_depend>pcl_conversions</run_depend>
+
+  <build_depend>std_msgs</build_depend>
+  <run_depend>std_msgs</run_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <run_depend>sensor_msgs</run_depend>
+  <build_depend>geometry_msgs</build_depend>
+  <run_depend>geometry_msgs</run_depend>
+  <build_depend>nav_msgs</build_depend>
+  <run_depend>nav_msgs</run_depend>
+
+  <build_depend>message_generation</build_depend>
+  <run_depend>message_generation</run_depend>
+  <build_depend>message_runtime</build_depend>
+  <run_depend>message_runtime</run_depend>
+
+  <build_depend>GTSAM</build_depend>
+  <run_depend>GTSAM</run_depend>
+
+</package>

+ 273 - 0
src/featureExtraction.cpp

@@ -0,0 +1,273 @@
+#include "utility.h"
+#include "lio_sam/cloud_info.h"
+
+struct smoothness_t{ 
+    float value;
+    size_t ind;
+};
+
+struct by_value{ 
+    bool operator()(smoothness_t const &left, smoothness_t const &right) { 
+        return left.value < right.value;
+    }
+};
+
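+// Feature extraction node: subscribes to the deskewed cloud info published by
+// imageProjection, computes a per-point roughness metric, classifies points
+// into edge (corner) and surface features, and republishes the augmented
+// cloud_info message for the mapping / localization nodes.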
+class FeatureExtraction : public ParamServer
+{
+
+public:
+
+    ros::Subscriber subLaserCloudInfo;
+
+    ros::Publisher pubLaserCloudInfo;
+    ros::Publisher pubCornerPoints;
+    ros::Publisher pubSurfacePoints;
+
+    pcl::PointCloud<PointType>::Ptr extractedCloud;
+    pcl::PointCloud<PointType>::Ptr cornerCloud;
+    pcl::PointCloud<PointType>::Ptr surfaceCloud;
+
+    pcl::VoxelGrid<PointType> downSizeFilter;
+
+    lio_sam::cloud_info cloudInfo;
+    std_msgs::Header cloudHeader;
+
+    std::vector<smoothness_t> cloudSmoothness;
+    float *cloudCurvature;
+    int *cloudNeighborPicked;
+    int *cloudLabel;
+
+    FeatureExtraction()
+    {
+        subLaserCloudInfo = nh.subscribe<lio_sam::cloud_info>("lio_sam/deskew/cloud_info", 1, &FeatureExtraction::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());
+
+        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> ("lio_sam/feature/cloud_info", 1);
+        pubCornerPoints = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/feature/cloud_corner", 1);
+        pubSurfacePoints = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/feature/cloud_surface", 1);
+        
+        initializationValue();
+    }
+
+    void initializationValue()
+    {
+        // allocate memory
+        cloudSmoothness.resize(N_SCAN*Horizon_SCAN);
+        // odometrySurfLeafSize: voxel-grid leaf size used to downsample surface features
+        downSizeFilter.setLeafSize(odometrySurfLeafSize, odometrySurfLeafSize, odometrySurfLeafSize);
+
+        extractedCloud.reset(new pcl::PointCloud<PointType>());
+        cornerCloud.reset(new pcl::PointCloud<PointType>());
+        surfaceCloud.reset(new pcl::PointCloud<PointType>());
+
+        cloudCurvature = new float[N_SCAN*Horizon_SCAN];
+        cloudNeighborPicked = new int[N_SCAN*Horizon_SCAN];
+        cloudLabel = new int[N_SCAN*Horizon_SCAN];
+    }
+
+    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
+    {
+        cloudInfo = *msgIn; // new cloud info
+        cloudHeader = msgIn->header; // new cloud header
+        pcl::fromROSMsg(msgIn->cloud_deskewed, *extractedCloud); // new cloud for extraction
+
+        calculateSmoothness();
+
+        markOccludedPoints();
+
+        extractFeatures();
+
+        publishFeatureCloud();
+    }
+
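+    // Roughness metric from LOAM: for each point, take the difference between
+    // ten times its range and the summed ranges of its 10 ring neighbours
+    // (5 on each side) and square it; large values indicate edges, small
+    // values indicate flat surfaces.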
+    void calculateSmoothness()
+    {
+        int cloudSize = extractedCloud->points.size();
+        for (int i = 5; i < cloudSize - 5; i++)
+        {
+            float diffRange = cloudInfo.pointRange[i-5] + cloudInfo.pointRange[i-4]
+                            + cloudInfo.pointRange[i-3] + cloudInfo.pointRange[i-2]
+                            + cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i] * 10
+                            + cloudInfo.pointRange[i+1] + cloudInfo.pointRange[i+2]
+                            + cloudInfo.pointRange[i+3] + cloudInfo.pointRange[i+4]
+                            + cloudInfo.pointRange[i+5];            
+
+            cloudCurvature[i] = diffRange*diffRange;//diffX * diffX + diffY * diffY + diffZ * diffZ;
+
+            cloudNeighborPicked[i] = 0;
+            cloudLabel[i] = 0;
+            // cloudSmoothness for sorting
+            cloudSmoothness[i].value = cloudCurvature[i];
+            cloudSmoothness[i].ind = i;
+        }
+    }
+
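+    // Discard unreliable feature candidates: points adjacent to a depth jump
+    // of more than 0.3 m (within 10 columns) are likely occluded edges, and
+    // points whose neighbours both differ by more than 2% of the range lie on
+    // beams nearly parallel to the surface.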
+    void markOccludedPoints()
+    {
+        int cloudSize = extractedCloud->points.size();
+        // mark occluded points and parallel beam points
+        for (int i = 5; i < cloudSize - 6; ++i)
+        {
+            // occluded points
+            float depth1 = cloudInfo.pointRange[i];
+            float depth2 = cloudInfo.pointRange[i+1];
+            int columnDiff = std::abs(int(cloudInfo.pointColInd[i+1] - cloudInfo.pointColInd[i]));
+
+            if (columnDiff < 10){
+                // 10 pixel diff in range image
+                if (depth1 - depth2 > 0.3){
+                    cloudNeighborPicked[i - 5] = 1;
+                    cloudNeighborPicked[i - 4] = 1;
+                    cloudNeighborPicked[i - 3] = 1;
+                    cloudNeighborPicked[i - 2] = 1;
+                    cloudNeighborPicked[i - 1] = 1;
+                    cloudNeighborPicked[i] = 1;
+                }else if (depth2 - depth1 > 0.3){
+                    cloudNeighborPicked[i + 1] = 1;
+                    cloudNeighborPicked[i + 2] = 1;
+                    cloudNeighborPicked[i + 3] = 1;
+                    cloudNeighborPicked[i + 4] = 1;
+                    cloudNeighborPicked[i + 5] = 1;
+                    cloudNeighborPicked[i + 6] = 1;
+                }
+            }
+            // parallel beam
+            float diff1 = std::abs(float(cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i]));
+            float diff2 = std::abs(float(cloudInfo.pointRange[i+1] - cloudInfo.pointRange[i]));
+
+            if (diff1 > 0.02 * cloudInfo.pointRange[i] && diff2 > 0.02 * cloudInfo.pointRange[i])
+                cloudNeighborPicked[i] = 1;
+        }
+    }
+
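+    // Split every scan ring into 6 sub-regions, sort each by roughness, pick
+    // at most 20 of the sharpest points per sub-region as edge features, and
+    // treat the remaining low-roughness points as surface features, which are
+    // voxel-downsampled before being published.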
+    void extractFeatures()
+    {
+        cornerCloud->clear();
+        surfaceCloud->clear();
+
+        pcl::PointCloud<PointType>::Ptr surfaceCloudScan(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr surfaceCloudScanDS(new pcl::PointCloud<PointType>());
+
+        for (int i = 0; i < N_SCAN; i++)
+        {
+            surfaceCloudScan->clear();
+
+            for (int j = 0; j < 6; j++)
+            {
+
+                int sp = (cloudInfo.startRingIndex[i] * (6 - j) + cloudInfo.endRingIndex[i] * j) / 6;
+                int ep = (cloudInfo.startRingIndex[i] * (5 - j) + cloudInfo.endRingIndex[i] * (j + 1)) / 6 - 1;
+
+                if (sp >= ep)
+                    continue;
+
+                std::sort(cloudSmoothness.begin()+sp, cloudSmoothness.begin()+ep, by_value());
+
+                int largestPickedNum = 0;
+                for (int k = ep; k >= sp; k--)
+                {
+                    int ind = cloudSmoothness[k].ind;
+                    if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] > edgeThreshold)
+                    {
+                        largestPickedNum++;
+                        if (largestPickedNum <= 20){
+                            cloudLabel[ind] = 1;
+                            cornerCloud->push_back(extractedCloud->points[ind]);
+                        } else {
+                            break;
+                        }
+
+                        cloudNeighborPicked[ind] = 1;
+                        for (int l = 1; l <= 5; l++)
+                        {
+                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));
+                            if (columnDiff > 10)
+                                break;
+                            cloudNeighborPicked[ind + l] = 1;
+                        }
+                        for (int l = -1; l >= -5; l--)
+                        {
+                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));
+                            if (columnDiff > 10)
+                                break;
+                            cloudNeighborPicked[ind + l] = 1;
+                        }
+                    }
+                }
+
+                for (int k = sp; k <= ep; k++)
+                {
+                    int ind = cloudSmoothness[k].ind;
+                    if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] < surfThreshold)
+                    {
+
+                        cloudLabel[ind] = -1;
+                        cloudNeighborPicked[ind] = 1;
+
+                        for (int l = 1; l <= 5; l++) {
+
+                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));
+                            if (columnDiff > 10)
+                                break;
+
+                            cloudNeighborPicked[ind + l] = 1;
+                        }
+                        for (int l = -1; l >= -5; l--) {
+
+                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));
+                            if (columnDiff > 10)
+                                break;
+
+                            cloudNeighborPicked[ind + l] = 1;
+                        }
+                    }
+                }
+
+                for (int k = sp; k <= ep; k++)
+                {
+                    if (cloudLabel[k] <= 0){
+                        surfaceCloudScan->push_back(extractedCloud->points[k]);
+                    }
+                }
+            }
+
+            surfaceCloudScanDS->clear();
+            downSizeFilter.setInputCloud(surfaceCloudScan);
+            downSizeFilter.filter(*surfaceCloudScanDS);
+
+            *surfaceCloud += *surfaceCloudScanDS;
+        }
+    }
+
+    void freeCloudInfoMemory()
+    {
+        cloudInfo.startRingIndex.clear();
+        cloudInfo.endRingIndex.clear();
+        cloudInfo.pointColInd.clear();
+        cloudInfo.pointRange.clear();
+    }
+
+    void publishFeatureCloud()
+    {
+        // free cloud info memory
+        freeCloudInfoMemory();
+        // save newly extracted features
+        cloudInfo.cloud_corner  = publishCloud(&pubCornerPoints,  cornerCloud,  cloudHeader.stamp, lidarFrame);
+        cloudInfo.cloud_surface = publishCloud(&pubSurfacePoints, surfaceCloud, cloudHeader.stamp, lidarFrame);
+        // publish to mapOptimization
+        pubLaserCloudInfo.publish(cloudInfo);
+    }
+};
+
+
+int main(int argc, char** argv)
+{
+    ros::init(argc, argv, "lio_sam");
+
+    FeatureExtraction FE;
+
+    ROS_INFO("\033[1;32m----> Feature Extraction Started.\033[0m");
+   
+    ros::spin();
+
+    return 0;
+}

+ 660 - 0
src/imageProjection.cpp

@@ -0,0 +1,660 @@
+#include "utility.h"
+#include "lio_sam/cloud_info.h"
+#include <std_msgs/Float64.h>
+// Velodyne
+//struct PointXYZIRT {
+//    PCL_ADD_POINT4D
+//
+//    PCL_ADD_INTENSITY;
+//    double time;
+//    uint16_t ring;
+//
+//    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+//} EIGEN_ALIGN16;
+//
+//POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRT,
+//                                   (float, x, x)(float, y, y)(float, z, z)(float, intensity, intensity)(double, time,
+//                                                                                                        time)
+//                                           (uint16_t, ring, ring)
+//)
+struct PointXYZIRT
+{
+    PCL_ADD_POINT4D
+    PCL_ADD_INTENSITY;
+    uint16_t ring;
+    float time;
+    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+} EIGEN_ALIGN16;
+
+POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRT,
+                                   (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
+                                           (uint16_t, ring, ring) (float, time, time)
+)
+// Ouster
+// struct PointXYZIRT {
+//     PCL_ADD_POINT4D;
+//     float intensity;
+//     uint32_t t;
+//     uint16_t reflectivity;
+//     uint8_t ring;
+//     uint16_t noise;
+//     uint32_t range;
+//     EIGEN_MAKE_ALIGNED_OPERATOR_NEW
+// }EIGEN_ALIGN16;
+
+// POINT_CLOUD_REGISTER_POINT_STRUCT(PointXYZIRT,
+//     (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
+//     (uint32_t, t, t) (uint16_t, reflectivity, reflectivity)
+//     (uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range)
+// )
+
+const int queueLength = 2000;
+
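+// Image projection node: buffers IMU and odometry data, deskews each incoming
+// lidar scan with the rotation integrated from the IMU, projects the points
+// into an N_SCAN x Horizon_SCAN range image, and publishes the extracted
+// cloud together with its cloud_info metadata.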
+class ImageProjection : public ParamServer {
+private:
+
+    std::mutex imuLock;
+    std::mutex wheelOdomLock;
+    std::mutex odoLock;
+
+    ros::Subscriber subLaserCloud;
+    ros::Publisher pubLaserCloud;
+
+    ros::Publisher pubExtractedCloud;
+    ros::Publisher pubLaserCloudInfo;
+
+    ros::Subscriber subImu;
+    ros::Subscriber subWheelOdom;
+    ros::Subscriber subLidarPose;
+    std::deque<sensor_msgs::Imu> imuQueue;
+    std::deque<nav_msgs::Odometry> wheelOdomQueue;
+
+    ros::Subscriber subOdom;
+    std::deque<nav_msgs::Odometry> odomQueue;
+
+    std::deque<sensor_msgs::PointCloud2> cloudQueue;
+    sensor_msgs::PointCloud2 currentCloudMsg;
+
+    double *imuTime = new double[queueLength];
+    double *imuRotX = new double[queueLength];
+    double *imuRotY = new double[queueLength];
+    double *imuRotZ = new double[queueLength];
+
+    int imuPointerCur;
+    bool firstPointFlag;
+    Eigen::Affine3f transStartInverse;
+
+    pcl::PointCloud<PointXYZIRT>::Ptr laserCloudIn;
+    pcl::PointCloud<PointType>::Ptr fullCloud;
+    pcl::PointCloud<PointType>::Ptr tempCloud;
+    pcl::PointCloud<PointType>::Ptr extractedCloud;
+
+    int deskewFlag;
+    cv::Mat rangeMat;
+
+    bool odomDeskewFlag;
+    float odomIncreX;
+    float odomIncreY;
+    float odomIncreZ;
+
+    lio_sam::cloud_info cloudInfo;
+    double timeScanCur;
+    double timeScanEnd;
+    std_msgs::Header cloudHeader;
+    double lidar_pose_x{0};
+
+public:
+    ImageProjection() :
+            deskewFlag(0) {
+        subImu = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &ImageProjection::imuHandler, this,
+                                                ros::TransportHints().tcpNoDelay());
+
+        subOdom = nh.subscribe<nav_msgs::Odometry>(odomTopic + "_incremental", 2000, &ImageProjection::odometryHandler,
+                                                   this, ros::TransportHints().tcpNoDelay());
+        subLaserCloud = nh.subscribe<sensor_msgs::PointCloud2>(pointCloudTopic, 5, &ImageProjection::cloudHandler, this,
+                                                               ros::TransportHints().tcpNoDelay());
+        subLidarPose = nh.subscribe<std_msgs::Float64>("lidar_pose", 5, &ImageProjection::lidarPoseCallback, this,
+                                                               ros::TransportHints().tcpNoDelay());
+
+        pubExtractedCloud = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/deskew/cloud_deskewed", 1);
+        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info>("lio_sam/deskew/cloud_info", 1);
+
+        allocateMemory();
+        resetParameters();
+
+        pcl::console::setVerbosityLevel(pcl::console::L_ERROR);
+    }
+
+    void allocateMemory() {
+        laserCloudIn.reset(new pcl::PointCloud<PointXYZIRT>());
+        fullCloud.reset(new pcl::PointCloud<PointType>());
+        tempCloud.reset(new pcl::PointCloud<PointType>());
+        extractedCloud.reset(new pcl::PointCloud<PointType>());
+
+        fullCloud->points.resize(N_SCAN * Horizon_SCAN);
+        tempCloud->points.resize(N_SCAN * Horizon_SCAN);
+
+        cloudInfo.startRingIndex.assign(N_SCAN, 0);
+        cloudInfo.endRingIndex.assign(N_SCAN, 0);
+
+        cloudInfo.pointColInd.assign(N_SCAN * Horizon_SCAN, 0);
+        cloudInfo.pointRange.assign(N_SCAN * Horizon_SCAN, 0);
+
+        resetParameters();
+    }
+
+    void resetParameters() {
+        laserCloudIn->clear();
+        extractedCloud->clear();
+        // reset range matrix for range image projection
+        rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX));
+
+        imuPointerCur = 0;
+        firstPointFlag = true;
+        odomDeskewFlag = false;
+
+        for (int i = 0; i < queueLength; ++i) {
+            imuTime[i] = 0;
+            imuRotX[i] = 0;
+            imuRotY[i] = 0;
+            imuRotZ[i] = 0;
+        }
+    }
+
+    ~ImageProjection() {}
+
+    void imuHandler(const sensor_msgs::Imu::ConstPtr &imuMsg) {
+        sensor_msgs::Imu thisImu = imuConverter(*imuMsg);
+        std::lock_guard<std::mutex> lock1(imuLock);
+
+        imuQueue.push_back(thisImu);
+
+//         debug IMU data
+//         cout << std::setprecision(6);
+//         cout << "IMU acc: " << endl;
+//         cout << "x: " << thisImu.linear_acceleration.x <<
+//               ", y: " << thisImu.linear_acceleration.y <<
+//               ", z: " << thisImu.linear_acceleration.z << endl;
+//         cout << "IMU gyro: " << endl;
+//         cout << "x: " << thisImu.angular_velocity.x <<
+//               ", y: " << thisImu.angular_velocity.y <<
+//               ", z: " << thisImu.angular_velocity.z << endl;
+//         double imuRoll, imuPitch, imuYaw;
+//         tf::Quaternion orientation;
+//         tf::quaternionMsgToTF(thisImu.orientation, orientation);
+//         tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);
+//         cout << "IMU roll pitch yaw: " << endl;
+//         cout << "roll: " << imuRoll << ", pitch: " << imuPitch << ", yaw: " << imuYaw << endl << endl;
+    }
+
+    void wheelOdomHandler(const nav_msgs::Odometry::ConstPtr &odomMsg) {
+        std::lock_guard<std::mutex> lock1(wheelOdomLock);
+
+        wheelOdomQueue.push_back(*odomMsg);
+
+    }
+
+    void odometryHandler(const nav_msgs::Odometry::ConstPtr &odometryMsg) {
+        std::lock_guard<std::mutex> lock2(odoLock);
+        odomQueue.push_back(*odometryMsg);
+    }
+    void lidarPoseCallback(const std_msgs::Float64ConstPtr &msg){
+        lidar_pose_x = msg->data;
+    }
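+    // Per-scan pipeline: cache the message, gather IMU/odometry deskew info,
+    // project the cloud into the range image, extract the valid points,
+    // publish them, and reset the per-scan state.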
+    void cloudHandler(const sensor_msgs::PointCloud2ConstPtr &laserCloudMsg) {
+
+        if (!cachePointCloud(laserCloudMsg))
+            return;
+        if (!deskewInfo())
+            return;
+
+        projectPointCloud();
+
+        cloudExtraction();
+        publishClouds();
+        resetParameters();
+    }
+
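+    // Queue incoming clouds and only start processing once a few scans are
+    // buffered; convert the oldest one to PCL and check that the fields needed
+    // for deskewing (dense data, "ring" channel, per-point time) are present.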
+    bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr &laserCloudMsg) {
+        // cache point cloud
+        cloudQueue.push_back(*laserCloudMsg);
+        if (cloudQueue.size() <= 2)
+            return false;
+
+        // convert cloud
+        currentCloudMsg = cloudQueue.front();
+        cloudQueue.pop_front();
+        pcl::fromROSMsg(currentCloudMsg, *laserCloudIn);
+
+        // get timestamp
+        cloudHeader = currentCloudMsg.header;
+        timeScanCur = cloudHeader.stamp.toSec();
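+        // The per-point "time" field is used directly as the scan end time,
+        // i.e. it is treated as an absolute timestamp; if the driver reports
+        // offsets relative to the scan start, the commented-out variant below
+        // applies instead.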
+        timeScanEnd = laserCloudIn->points.back().time; // Velodyne
+//        ROS_INFO("time %f %f %f", laserCloudIn->points.back().time,laserCloudIn->points.front().time,
+//                 laserCloudIn->points.back().time - laserCloudIn->points.front().time);
+//        timeScanEnd = timeScanCur + laserCloudIn->points.back().time; // Velodyne
+        // timeScanEnd = timeScanCur + (float)laserCloudIn->points.back().t / 1000000000.0; // Ouster
+
+        // check dense flag
+        if (laserCloudIn->is_dense == false and useRing) {
+            ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!");
+            ros::shutdown();
+        }
+
+        // check ring channel
+        static int ringFlag = 0;
+        if (ringFlag == 0 and useRing) {
+            ringFlag = -1;
+            for (int i = 0; i < (int) currentCloudMsg.fields.size(); ++i) {
+                if (currentCloudMsg.fields[i].name == "ring") {
+                    ringFlag = 1;
+                    break;
+                }
+            }
+            if (ringFlag == -1) {
+                ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!");
+                ros::shutdown();
+            }
+        }
+
+        // check point time
+        if (deskewFlag == 0) {
+            deskewFlag = -1;
+            for (int i = 0; i < (int) currentCloudMsg.fields.size(); ++i) {
+                if (currentCloudMsg.fields[i].name == timeField) {
+                    deskewFlag = 1;
+                    break;
+                }
+            }
+            if (deskewFlag == -1)
+                ROS_WARN(
+                        "Point cloud timestamp not available, deskew function disabled, system will drift significantly!");
+        }
+        return true;
+    }
+
+    bool deskewInfo() {
+        std::lock_guard<std::mutex> lock1(imuLock);
+        std::lock_guard<std::mutex> lock2(odoLock);
+
+        // make sure IMU data is available for the whole scan
+        if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur ||
+            imuQueue.back().header.stamp.toSec() < timeScanEnd) {
+            ROS_INFO("Waiting for IMU data ...");
+            return false;
+        }
+
+        imuDeskewInfo();
+        odomDeskewInfo();
+        return true;
+    }
+
+    void imuDeskewInfo() {
+        cloudInfo.imuAvailable = false;
+
+        while (!imuQueue.empty()) {
+            if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.05)
+                imuQueue.pop_front();
+            else
+                break;
+        }
+
+        if (imuQueue.empty())
+            return;
+
+        imuPointerCur = 0;
+
+        for (int i = 0; i < (int) imuQueue.size(); ++i) {
+            sensor_msgs::Imu thisImuMsg = imuQueue[i];
+            double currentImuTime = thisImuMsg.header.stamp.toSec();
+
+            // get roll, pitch, and yaw estimation for this scan
+            if (currentImuTime <= timeScanCur)
+                imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit);
+//                ROS_INFO("info %f %f %f", cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);
+            if (currentImuTime > timeScanEnd + 0.01)
+                break;
+
+            if (imuPointerCur == 0) {
+                imuRotX[0] = 0;
+                imuRotY[0] = 0;
+                imuRotZ[0] = 0;
+                imuTime[0] = currentImuTime;
+                ++imuPointerCur;
+                continue;
+            }
+
+            // get angular velocity
+            double angular_x, angular_y, angular_z;
+            imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z);
+
+            // integrate rotation
+            double timeDiff = currentImuTime - imuTime[imuPointerCur - 1];
+            imuRotX[imuPointerCur] = imuRotX[imuPointerCur - 1] + angular_x * timeDiff;
+            imuRotY[imuPointerCur] = imuRotY[imuPointerCur - 1] + angular_y * timeDiff;
+            imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur - 1] + angular_z * timeDiff;
+            imuTime[imuPointerCur] = currentImuTime;
+            ++imuPointerCur;
+        }
+
+        --imuPointerCur;
+
+        if (imuPointerCur <= 0)
+            return;
+        cloudInfo.imuAvailable = true;
+    }
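+
+    // imuDeskewInfo() builds a small lookup table (imuTime[i], imuRotX/Y/Z[i]) covering the scan by
+    // integrating the gyro rates: rot[i] = rot[i-1] + omega * (t[i] - t[i-1]), with rot[0] = 0 at the
+    // first buffered IMU sample near the scan start. findRotation() later interpolates in this table
+    // to recover the incremental sensor rotation at an arbitrary point timestamp within the scan.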
+
+    void wheelOdomDeskewInfo() {
+        cloudInfo.imuAvailable = false;
+
+        while (!wheelOdomQueue.empty()) {
+            if (wheelOdomQueue.front().header.stamp.toSec() < timeScanCur - 0.05)
+                wheelOdomQueue.pop_front();
+            else
+                break;
+        }
+
+        if (wheelOdomQueue.empty())
+            return;
+
+        imuPointerCur = 0;
+
+        for (int i = 0; i < (int) wheelOdomQueue.size(); ++i) {
+            nav_msgs::Odometry thisOdomMsg = wheelOdomQueue[i];
+            double currentOdomTime = thisOdomMsg.header.stamp.toSec();
+
+            // get roll, pitch, and yaw estimation for this scan
+            if (currentOdomTime <= timeScanCur)
+                cloudInfo.imuYawInit = tf::getYaw(thisOdomMsg.pose.pose.orientation);
+
+
+            if (currentOdomTime > timeScanEnd + 0.01)
+                break;
+
+            if (imuPointerCur == 0) {
+                imuRotX[0] = 0;
+                imuRotY[0] = 0;
+                imuRotZ[0] = 0;
+                imuTime[0] = currentOdomTime;
+                ++imuPointerCur;
+                continue;
+            }
+
+            // get angular velocity
+            double angular_x, angular_y, angular_z;
+            angular_x = thisOdomMsg.twist.twist.angular.x;
+            angular_y = thisOdomMsg.twist.twist.angular.y;
+            angular_z = thisOdomMsg.twist.twist.angular.z;
+            // integrate rotation
+            double timeDiff = currentOdomTime - imuTime[imuPointerCur - 1];
+            imuRotX[imuPointerCur] = imuRotX[imuPointerCur - 1] + angular_x * timeDiff;
+            imuRotY[imuPointerCur] = imuRotY[imuPointerCur - 1] + angular_y * timeDiff;
+            imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur - 1] + angular_z * timeDiff;
+            imuTime[imuPointerCur] = currentOdomTime;
+            ++imuPointerCur;
+        }
+
+        --imuPointerCur;
+
+        if (imuPointerCur <= 0)
+            return;
+        cloudInfo.imuAvailable = true;
+    }
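+
+    // wheelOdomDeskewInfo() mirrors imuDeskewInfo() but integrates the wheel odometry's angular velocity
+    // into the same imuRot*/imuTime buffers and takes the initial yaw from the odometry orientation.
+    // In the code shown here it is not called from deskewInfo(), which uses imuDeskewInfo() and
+    // odomDeskewInfo() instead, so it appears to be an optional/experimental path.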
+
+    void odomDeskewInfo() {
+        cloudInfo.odomAvailable = false;
+
+        while (!odomQueue.empty()) {
+            if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.05)
+                odomQueue.pop_front();
+            else
+                break;
+        }
+
+        if (odomQueue.empty())
+            return;
+
+        if (odomQueue.front().header.stamp.toSec() > timeScanCur)
+            return;
+        // get start odometry at the beginning of the scan
+        nav_msgs::Odometry startOdomMsg;
+        for (int i = 0; i < (int) odomQueue.size(); ++i) {
+            startOdomMsg = odomQueue[i];
+
+            if (ROS_TIME(&startOdomMsg) < timeScanCur)
+                continue;
+            else
+                break;
+        }
+
+        tf::Quaternion orientation;
+        tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation);
+
+        double roll, pitch, yaw;
+        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
+        // Initial guess used in mapOptimization
+        cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x;
+        cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y;
+        cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z;
+        cloudInfo.initialGuessRoll = roll;
+        cloudInfo.initialGuessPitch = pitch;
+        cloudInfo.initialGuessYaw = yaw;
+
+        cloudInfo.odomAvailable = true;
+
+        // get end odometry at the end of the scan
+        odomDeskewFlag = false;
+
+        if (odomQueue.back().header.stamp.toSec() < timeScanEnd)
+            return;
+
+        nav_msgs::Odometry endOdomMsg;
+
+        for (int i = 0; i < (int) odomQueue.size(); ++i) {
+            endOdomMsg = odomQueue[i];
+
+            if (ROS_TIME(&endOdomMsg) < timeScanEnd)
+                continue;
+            else
+                break;
+        }
+
+        if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0])))
+            return;
+
+        Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x,
+                                                            startOdomMsg.pose.pose.position.y,
+                                                            startOdomMsg.pose.pose.position.z, roll, pitch, yaw);
+
+        tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation);
+        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
+        Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x,
+                                                          endOdomMsg.pose.pose.position.y,
+                                                          endOdomMsg.pose.pose.position.z, roll, pitch, yaw);
+
+        Eigen::Affine3f transBt = transBegin.inverse() * transEnd;
+
+        float rollIncre, pitchIncre, yawIncre;
+        pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre);
+
+        odomDeskewFlag = true;
+    }
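+
+    // odomDeskewInfo() takes the first odometry message at or after timeScanCur as the scan's initial
+    // guess (initialGuessX..Yaw, consumed by the map optimization / localization node) and, if a message
+    // at or after timeScanEnd with the same covariance[0] value exists, computes the relative motion over
+    // the scan:  transBt = transBegin^-1 * transEnd  ->  (odomIncreX, odomIncreY, odomIncreZ),
+    // which findPosition() could use for positional deskew (currently disabled below).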
+
+    void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur) {
+        *rotXCur = 0;
+        *rotYCur = 0;
+        *rotZCur = 0;
+
+        int imuPointerFront = 0;
+        while (imuPointerFront < imuPointerCur) {
+            if (pointTime < imuTime[imuPointerFront])
+                break;
+            ++imuPointerFront;
+        }
+
+        if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0) {
+            *rotXCur = imuRotX[imuPointerFront];
+            *rotYCur = imuRotY[imuPointerFront];
+            *rotZCur = imuRotZ[imuPointerFront];
+        } else {
+            int imuPointerBack = imuPointerFront - 1;
+            double ratioFront =
+                    (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
+            double ratioBack =
+                    (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
+            *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack;
+            *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack;
+            *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack;
+        }
+    }
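+
+    // findRotation() is a plain linear interpolation in the table built by imuDeskewInfo():
+    //   ratioFront = (t - t_back) / (t_front - t_back),  ratioBack = 1 - ratioFront
+    //   rot(t)     = rot_front * ratioFront + rot_back * ratioBack
+    // Outside the covered interval (or with a single sample) it falls back to the nearest table entry.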
+
+    void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur) {
+        *posXCur = 0;
+        *posYCur = 0;
+        *posZCur = 0;
+
+        // If the sensor moves relatively slowly (e.g., walking speed), positional deskew seems to bring little benefit, so the code below is commented out.
+
+        // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false)
+        //     return;
+
+        // float ratio = relTime / (timeScanEnd - timeScanCur);
+
+        // *posXCur = ratio * odomIncreX;
+        // *posYCur = ratio * odomIncreY;
+        // *posZCur = ratio * odomIncreZ;
+    }
+
+    PointType deskewPoint(PointType *point, double relTime) {
+        if (deskewFlag == -1 || cloudInfo.imuAvailable == false)
+            return *point;
+
+        double pointTime = timeScanCur + relTime;
+
+        float rotXCur, rotYCur, rotZCur;
+        findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur);
+
+        float posXCur, posYCur, posZCur;
+        findPosition(relTime, &posXCur, &posYCur, &posZCur);
+
+        if (firstPointFlag == true) {
+            transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur,
+                                                        rotZCur)).inverse();
+            firstPointFlag = false;
+        }
+
+        // transform points to start
+        Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur);
+        Eigen::Affine3f transBt = transStartInverse * transFinal;
+
+        PointType newPoint;
+        newPoint.x = transBt(0, 0) * point->x + transBt(0, 1) * point->y + transBt(0, 2) * point->z + transBt(0, 3);
+        newPoint.y = transBt(1, 0) * point->x + transBt(1, 1) * point->y + transBt(1, 2) * point->z + transBt(1, 3);
+        newPoint.z = transBt(2, 0) * point->x + transBt(2, 1) * point->y + transBt(2, 2) * point->z + transBt(2, 3);
+        newPoint.intensity = point->intensity;
+
+        return newPoint;
+    }
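+
+    // deskewPoint() expresses every point in the frame of the first deskewed point of the scan:
+    //   T_start = sensor pose at the first point, T_t = pose at this point's time (findRotation/findPosition)
+    //   p_start = (T_start^-1 * T_t) * p
+    // so the whole cloud looks as if it had been captured at a single instant at the start of the scan.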
+
+    void projectPointCloud() {
+        int cloudSize = laserCloudIn->points.size();
+        // range image projection
+        for (int i = 0; i < cloudSize; ++i) {
+            PointType thisPoint;
+            thisPoint.x = laserCloudIn->points[i].x;
+            thisPoint.y = laserCloudIn->points[i].y;
+            thisPoint.z = laserCloudIn->points[i].z;
+            thisPoint.intensity = laserCloudIn->points[i].intensity;
+
+            float range = pointDistance(thisPoint);
+            if (range < lidarMinRange || range > lidarMaxRange)
+                continue;
+
+            int rowIdn = laserCloudIn->points[i].ring;
+            if (not useRing) {
+                auto verticalAngle =
+                        atan2(thisPoint.z, sqrt(thisPoint.x * thisPoint.x + thisPoint.y * thisPoint.y)) * 180 / M_PI;
+                rowIdn = static_cast<int>((verticalAngle + 15.0) / 2.0);
+            }
+            if (rowIdn < 0 || rowIdn >= N_SCAN || thisPoint.z < -0.45)
+                continue;
+
+            if (rowIdn % downsampleRate != 0)
+                continue;
+
+            float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI;
+
+            static float ang_res_x = 360.0 / float(Horizon_SCAN);
+            int columnIdn = -round((horizonAngle - 90.0) / ang_res_x) + Horizon_SCAN / 2;
+            if (columnIdn >= Horizon_SCAN)
+                columnIdn -= Horizon_SCAN;
+
+            if (columnIdn < 0 || columnIdn >= Horizon_SCAN)
+                continue;
+
+            if (rangeMat.at<float>(rowIdn, columnIdn) != FLT_MAX)
+                continue;
+
+            thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time); // Velodyne
+            // thisPoint = deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0); // Ouster
+
+            rangeMat.at<float>(rowIdn, columnIdn) = range;
+
+            int index = columnIdn + rowIdn * Horizon_SCAN;
+
+
+            tempCloud->points[index] = thisPoint;
+        }
+        Eigen::Translation3f tl_m2w(0, 0, 0);                 // tl: translation
+        Eigen::AngleAxisf rot_x_m2w(lidar_pose_x, Eigen::Vector3f::UnitX());  // rot: rotation
+        Eigen::AngleAxisf rot_y_m2w(0, Eigen::Vector3f::UnitY());
+        Eigen::AngleAxisf rot_z_m2w(0, Eigen::Vector3f::UnitZ());
+        Eigen::Matrix4f tf_m2w = (tl_m2w * rot_z_m2w * rot_y_m2w * rot_x_m2w).matrix();
+        pcl::transformPointCloud(*tempCloud, *fullCloud, tf_m2w);
+    }
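+
+    // Range-image indexing used above (summarized here for reference):
+    //   rowIdn    = ring index, or without a ring field (verticalAngle + 15.0) / 2.0, which apparently
+    //               assumes a Velodyne-style -15..+15 deg layout with 2 deg spacing
+    //   columnIdn = -round((atan2(x, y) * 180/pi - 90) / (360 / Horizon_SCAN)) + Horizon_SCAN / 2
+    // Points with z < -0.45 m (sensor frame) are skipped, which appears to be a hardcoded height cutoff
+    // added in this fork. The final transformPointCloud() rotates the projected cloud about the X axis by
+    // lidar_pose_x (set in lidarPoseCallback(); assumed to be a roll angle in radians).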
+
+    void cloudExtraction() {
+        int count = 0;
+        // extract segmented cloud for lidar odometry
+        for (int i = 0; i < N_SCAN; ++i) {
+            cloudInfo.startRingIndex[i] = count - 1 + 5; // the offset of 5 leaves room for the smoothness (curvature) computation later
+
+            for (int j = 0; j < Horizon_SCAN; ++j) {
+                if (rangeMat.at<float>(i, j) != FLT_MAX) // valid measurement? unmeasured cells keep the default FLT_MAX
+                {
+                    // mark the points' column index for marking occlusion later
+                    cloudInfo.pointColInd[count] = j;
+                    // save range info
+                    cloudInfo.pointRange[count] = rangeMat.at<float>(i, j);
+                    // save extracted cloud
+                    extractedCloud->push_back(fullCloud->points[j + i * Horizon_SCAN]);
+                    // size of extracted cloud
+                    ++count;
+                }
+            }
+            cloudInfo.endRingIndex[i] = count - 1 - 5;
+        }
+    }
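+
+    // startRingIndex/endRingIndex are offset by +5/-5 so that the downstream feature extraction can run
+    // its curvature window (five neighbors on each side in stock LIO-SAM) without falling off either end
+    // of a ring.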
+
+    void publishClouds() {
+        cloudInfo.header = cloudHeader;
+        cloudInfo.cloud_deskewed = publishCloud(&pubExtractedCloud, extractedCloud, cloudHeader.stamp, lidarFrame);
+        pubLaserCloudInfo.publish(cloudInfo);
+    }
+};
+
+int main(int argc, char **argv) {
+    ros::init(argc, argv, "lio_sam");
+
+    ImageProjection IP;
+
+    ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m");
+
+    ros::MultiThreadedSpinner spinner(3);
+    spinner.spin();
+
+    return 0;
+}

+ 648 - 0
src/imuPreintegration.cpp

@@ -0,0 +1,648 @@
+#include "utility.h"
+
+#include <gtsam/geometry/Rot3.h>
+#include <gtsam/geometry/Pose3.h>
+#include <gtsam/slam/PriorFactor.h>
+#include <gtsam/slam/BetweenFactor.h>
+#include <gtsam/navigation/GPSFactor.h>
+#include <gtsam/navigation/ImuFactor.h>
+#include <gtsam/navigation/CombinedImuFactor.h>
+#include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
+#include <gtsam/nonlinear/Marginals.h>
+#include <gtsam/nonlinear/Values.h>
+#include <gtsam/inference/Symbol.h>
+
+#include <gtsam/nonlinear/ISAM2.h>
+#include <gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h>
+#include <Eigen/Core>
+#include <Eigen/Geometry>
+
+using gtsam::symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
+using gtsam::symbol_shorthand::V; // Vel   (xdot,ydot,zdot)
+using gtsam::symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)
+
+class TransformFusion : public ParamServer {
+public:
+    std::mutex mtx;
+
+    ros::Subscriber subImuOdometry;
+    ros::Subscriber odomSub;
+    ros::Subscriber subLaserOdometry;
+
+    ros::Publisher pubImuOdometry;
+    ros::Publisher pubImuPath;
+
+    Eigen::Affine3f lidarOdomAffine;
+    Eigen::Affine3f imuOdomAffineFront;
+    Eigen::Affine3f imuOdomAffineBack;
+
+    tf::TransformListener tfListener;
+    tf::StampedTransform lidar2Baselink;
+
+    double lidarOdomTime = -1;
+    deque<nav_msgs::Odometry> imuOdomQueue;
+    deque<nav_msgs::Odometry> odomQueue;
+
+    TransformFusion() {
+        if (lidarFrame != baselinkFrame) {
+            try {
+                tfListener.waitForTransform(lidarFrame, baselinkFrame, ros::Time(0), ros::Duration(3.0));
+                tfListener.lookupTransform(lidarFrame, baselinkFrame, ros::Time(0), lidar2Baselink);
+            }
+            catch (tf::TransformException ex) {
+                ROS_ERROR("%s", ex.what());
+            }
+        }
+
+        subLaserOdometry = nh.subscribe<nav_msgs::Odometry>("lio_sam/mapping/odometry", 5,
+                                                            &TransformFusion::lidarOdometryHandler, this,
+                                                            ros::TransportHints().tcpNoDelay());
+        subImuOdometry = nh.subscribe<nav_msgs::Odometry>(odomTopic + "_incremental", 2000,
+                                                          &TransformFusion::imuOdometryHandler, this,
+                                                          ros::TransportHints().tcpNoDelay());
+
+
+        pubImuOdometry = nh.advertise<nav_msgs::Odometry>(odomTopic, 2000);
+        pubImuPath = nh.advertise<nav_msgs::Path>("lio_sam/imu/path", 1);
+    }
+
+    Eigen::Affine3f odom2affine(nav_msgs::Odometry odom) {
+        double x, y, z, roll, pitch, yaw;
+        x = odom.pose.pose.position.x;
+        y = odom.pose.pose.position.y;
+        z = odom.pose.pose.position.z;
+        tf::Quaternion orientation;
+        tf::quaternionMsgToTF(odom.pose.pose.orientation, orientation);
+        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
+        return pcl::getTransformation(x, y, z, roll, pitch, yaw);
+    }
+
+    void lidarOdometryHandler(const nav_msgs::Odometry::ConstPtr &odomMsg) {
+        std::lock_guard<std::mutex> lock(mtx);
+
+        lidarOdomAffine = odom2affine(*odomMsg);
+
+        lidarOdomTime = odomMsg->header.stamp.toSec();
+    }
+
+    void odomHandler(const nav_msgs::Odometry::ConstPtr &odomMsg) {
+        // static tf
+        static tf::TransformBroadcaster tfMap2Odom;
+        static tf::Transform map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0));
+        tfMap2Odom.sendTransform(tf::StampedTransform(map_to_odom, odomMsg->header.stamp, mapFrame, odometryFrame));
+
+        std::lock_guard<std::mutex> lock(mtx);
+
+        odomQueue.push_back(*odomMsg);
+
+        // get latest odometry (at current IMU stamp)
+        if (lidarOdomTime == -1)
+            return;
+        while (!odomQueue.empty()) {
+            if (odomQueue.front().header.stamp.toSec() <= lidarOdomTime)
+                odomQueue.pop_front();
+            else
+                break;
+        }
+        Eigen::Affine3f odomAffineFront = odom2affine(odomQueue.front());
+        Eigen::Affine3f odomAffineBack = odom2affine(odomQueue.back());
+        Eigen::Affine3f odomAffineIncre = odomAffineFront.inverse() * odomAffineBack;
+        Eigen::Affine3f odomAffineLast = lidarOdomAffine * odomAffineIncre;
+        float x, y, z, roll, pitch, yaw;
+        pcl::getTranslationAndEulerAngles(odomAffineLast, x, y, z, roll, pitch, yaw);
+
+        // publish latest odometry
+        nav_msgs::Odometry laserOdometry = odomQueue.back();
+        laserOdometry.pose.pose.position.x = x;
+        laserOdometry.pose.pose.position.y = y;
+        laserOdometry.pose.pose.position.z = z;
+        laserOdometry.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
+        pubImuOdometry.publish(laserOdometry);
+
+        // publish tf
+        static tf::TransformBroadcaster tfOdom2BaseLink;
+        tf::Transform tCur;
+        tf::poseMsgToTF(laserOdometry.pose.pose, tCur);
+        if (lidarFrame != baselinkFrame)
+            tCur = tCur * lidar2Baselink;
+        tf::StampedTransform odom_2_baselink = tf::StampedTransform(tCur, odomMsg->header.stamp, odometryFrame,
+                                                                    baselinkFrame);
+        tfOdom2BaseLink.sendTransform(odom_2_baselink);
+
+        // publish IMU path
+        static nav_msgs::Path odomPath;
+        static double last_path_time = -1;
+        double odomTime = odomQueue.back().header.stamp.toSec();
+        if (odomTime - last_path_time > 0.1) {
+            last_path_time = odomTime;
+            geometry_msgs::PoseStamped pose_stamped;
+            pose_stamped.header.stamp = odomQueue.back().header.stamp;
+            pose_stamped.header.frame_id = odometryFrame;
+            pose_stamped.pose = laserOdometry.pose.pose;
+            odomPath.poses.push_back(pose_stamped);
+            while (!odomPath.poses.empty() && odomPath.poses.front().header.stamp.toSec() < lidarOdomTime - 1.0)
+                odomPath.poses.erase(odomPath.poses.begin());
+            if (pubImuPath.getNumSubscribers() != 0) {
+                odomPath.header.stamp = odomQueue.back().header.stamp;
+                odomPath.header.frame_id = odometryFrame;
+                pubImuPath.publish(odomPath);
+            }
+        }
+    }
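+
+    // odomHandler() mirrors imuOdometryHandler() below but fuses odomQueue instead of the IMU-rate
+    // odometry. In the constructor shown above only subLaserOdometry and subImuOdometry are wired up,
+    // so this handler (and the odomSub member) appears to be unused as committed.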
+
+    void imuOdometryHandler(const nav_msgs::Odometry::ConstPtr &odomMsg) {
+        // static tf
+        static tf::TransformBroadcaster tfMap2Odom;
+        static tf::Transform map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0));
+        tfMap2Odom.sendTransform(tf::StampedTransform(map_to_odom, odomMsg->header.stamp, mapFrame, odometryFrame));
+
+        std::lock_guard<std::mutex> lock(mtx);
+
+        imuOdomQueue.push_back(*odomMsg);
+
+        // get latest odometry (at current IMU stamp)
+        if (lidarOdomTime == -1)
+            return;
+        while (!imuOdomQueue.empty()) {
+            if (imuOdomQueue.front().header.stamp.toSec() <= lidarOdomTime)
+                imuOdomQueue.pop_front();
+            else
+                break;
+        }
+        Eigen::Affine3f imuOdomAffineFront = odom2affine(imuOdomQueue.front());
+        Eigen::Affine3f imuOdomAffineBack = odom2affine(imuOdomQueue.back());
+        Eigen::Affine3f imuOdomAffineIncre = imuOdomAffineFront.inverse() * imuOdomAffineBack;
+        Eigen::Affine3f imuOdomAffineLast = lidarOdomAffine * imuOdomAffineIncre;
+        float x, y, z, roll, pitch, yaw;
+        pcl::getTranslationAndEulerAngles(imuOdomAffineLast, x, y, z, roll, pitch, yaw);
+
+        // publish latest odometry
+        nav_msgs::Odometry laserOdometry = imuOdomQueue.back();
+        laserOdometry.pose.pose.position.x = x;
+        laserOdometry.pose.pose.position.y = y;
+        laserOdometry.pose.pose.position.z = z;
+        laserOdometry.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
+        pubImuOdometry.publish(laserOdometry);
+
+        // publish tf
+        static tf::TransformBroadcaster tfOdom2BaseLink;
+        tf::Transform tCur;
+        tf::poseMsgToTF(laserOdometry.pose.pose, tCur);
+        if (lidarFrame != baselinkFrame)
+            tCur = tCur * lidar2Baselink;
+        tf::StampedTransform odom_2_baselink = tf::StampedTransform(tCur, odomMsg->header.stamp, odometryFrame,
+                                                                    baselinkFrame);
+        tfOdom2BaseLink.sendTransform(odom_2_baselink);
+
+        // publish IMU path
+        static nav_msgs::Path imuPath;
+        static double last_path_time = -1;
+        double imuTime = imuOdomQueue.back().header.stamp.toSec();
+        if (imuTime - last_path_time > 0.1) {
+            last_path_time = imuTime;
+            geometry_msgs::PoseStamped pose_stamped;
+            pose_stamped.header.stamp = imuOdomQueue.back().header.stamp;
+            pose_stamped.header.frame_id = odometryFrame;
+            pose_stamped.pose = laserOdometry.pose.pose;
+            imuPath.poses.push_back(pose_stamped);
+            while (!imuPath.poses.empty() && imuPath.poses.front().header.stamp.toSec() < lidarOdomTime - 1.0)
+                imuPath.poses.erase(imuPath.poses.begin());
+            if (pubImuPath.getNumSubscribers() != 0) {
+                imuPath.header.stamp = imuOdomQueue.back().header.stamp;
+                imuPath.header.frame_id = odometryFrame;
+                pubImuPath.publish(imuPath);
+            }
+        }
+    }
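+
+    // Fusion rule used in the handlers above: take the latest lidar mapping pose and compose it with the
+    // incremental motion accumulated by the high-rate odometry since that lidar timestamp, i.e.
+    //   T_fused = T_lidar * (T_odom(front)^-1 * T_odom(back))
+    // which yields high-rate odometry that stays anchored to the lidar mapping result.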
+};
+
+class IMUPreintegration : public ParamServer {
+public:
+
+    std::mutex mtx;
+
+    ros::Subscriber subImu;
+    ros::Subscriber subOdom;
+    ros::Subscriber subOdometry;
+    ros::Publisher pubImuOdometry;
+
+    bool systemInitialized = false;
+
+    gtsam::noiseModel::Diagonal::shared_ptr priorPoseNoise;
+    gtsam::noiseModel::Diagonal::shared_ptr priorVelNoise;
+    gtsam::noiseModel::Diagonal::shared_ptr priorBiasNoise;
+    gtsam::noiseModel::Diagonal::shared_ptr correctionNoise;
+    gtsam::noiseModel::Diagonal::shared_ptr correctionNoise2;
+    gtsam::Vector noiseModelBetweenBias;
+
+
+    gtsam::PreintegratedImuMeasurements *imuIntegratorOpt_;
+    gtsam::PreintegratedImuMeasurements *imuIntegratorImu_;
+
+    std::deque<sensor_msgs::Imu> imuQueOpt;
+    std::deque<sensor_msgs::Imu> imuQueImu;
+    nav_msgs::Odometry odomPose;
+
+    gtsam::Pose3 prevPose_;
+    gtsam::Vector3 prevVel_;
+    gtsam::NavState prevState_;
+    gtsam::imuBias::ConstantBias prevBias_;
+
+    Eigen::Isometry3d posePrev = Eigen::Isometry3d::Identity();
+
+    gtsam::NavState prevStateOdom;
+    gtsam::imuBias::ConstantBias prevBiasOdom;
+
+    bool doneFirstOpt = false;
+    double lastImuT_imu = -1;
+    double lastImuT_opt = -1;
+
+    gtsam::ISAM2 optimizer;
+    gtsam::NonlinearFactorGraph graphFactors;
+    gtsam::Values graphValues;
+
+    const double delta_t = 0;
+
+    int key = 1;
+
+    gtsam::Pose3 imu2Lidar = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0),
+                                          gtsam::Point3(-extTrans.x(), -extTrans.y(), -extTrans.z()));
+    gtsam::Pose3 lidar2Imu = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0),
+                                          gtsam::Point3(extTrans.x(), extTrans.y(), extTrans.z()));
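+
+    // imu2Lidar / lidar2Imu are translation-only extrinsics (Rot3(1, 0, 0, 0) is the identity quaternion):
+    // the rotational part of the IMU-to-lidar extrinsic is assumed to have been applied already by
+    // imuConverter() when the raw IMU messages are queued in imuHandler().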
+
+    IMUPreintegration() {
+        subOdom = nh.subscribe<nav_msgs::Odometry>("odom", 2000, &IMUPreintegration::odomHandler, this,
+                                                   ros::TransportHints().tcpNoDelay());
+
+        subImu = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &IMUPreintegration::imuHandler, this,
+                                                ros::TransportHints().tcpNoDelay());
+
+
+        subOdometry = nh.subscribe<nav_msgs::Odometry>("lio_sam/mapping/odometry_incremental", 5,
+                                                       &IMUPreintegration::odometryHandler, this,
+                                                       ros::TransportHints().tcpNoDelay());
+
+        pubImuOdometry = nh.advertise<nav_msgs::Odometry>(odomTopic + "_incremental", 2000);
+
+        boost::shared_ptr<gtsam::PreintegrationParams> p = gtsam::PreintegrationParams::MakeSharedU(imuGravity);
+        p->accelerometerCovariance =
+                gtsam::Matrix33::Identity(3, 3) * pow(imuAccNoise, 2); // acc white noise in continuous
+        p->gyroscopeCovariance =
+                gtsam::Matrix33::Identity(3, 3) * pow(imuGyrNoise, 2); // gyro white noise in continuous
+        p->integrationCovariance = gtsam::Matrix33::Identity(3, 3) *
+                                   pow(1e-4, 2); // error committed in integrating position from velocities
+        gtsam::imuBias::ConstantBias prior_imu_bias(
+                (gtsam::Vector(6) << 0, 0, 0, 0, 0, 0).finished()); // assume zero initial bias
+
+        priorPoseNoise = gtsam::noiseModel::Diagonal::Sigmas(
+                (gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-2, 1e-2, 1e-2).finished()); // rad,rad,rad,m, m, m
+        priorVelNoise = gtsam::noiseModel::Isotropic::Sigma(3, 1e4); // m/s
+        priorBiasNoise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-3); // 1e-2 ~ 1e-3 seems to be good
+        correctionNoise = gtsam::noiseModel::Diagonal::Sigmas(
+                (gtsam::Vector(6) << 0.05, 0.05, 0.05, 0.1, 0.1, 0.1).finished()); // rad,rad,rad,m, m, m
+        correctionNoise2 = gtsam::noiseModel::Diagonal::Sigmas(
+                (gtsam::Vector(6) << 1, 1, 1, 1, 1, 1).finished()); // rad,rad,rad,m, m, m
+        noiseModelBetweenBias = (gtsam::Vector(6)
+                << imuAccBiasN, imuAccBiasN, imuAccBiasN, imuGyrBiasN, imuGyrBiasN, imuGyrBiasN).finished();
+
+        imuIntegratorImu_ = new gtsam::PreintegratedImuMeasurements(p,
+                                                                    prior_imu_bias); // setting up the IMU integration for IMU message thread
+        imuIntegratorOpt_ = new gtsam::PreintegratedImuMeasurements(p,
+                                                                    prior_imu_bias); // setting up the IMU integration for optimization
+    }
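+
+    // imuIntegratorOpt_ feeds the factor-graph optimization triggered by each lidar odometry correction in
+    // odometryHandler(), while imuIntegratorImu_ is re-propagated from the latest optimized state to
+    // publish IMU-rate odometry in imuHandler().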
+
+    void resetOptimization() {
+        gtsam::ISAM2Params optParameters;
+        optParameters.relinearizeThreshold = 0.1;
+        optParameters.relinearizeSkip = 1;
+        optimizer = gtsam::ISAM2(optParameters);
+
+        gtsam::NonlinearFactorGraph newGraphFactors;
+        graphFactors = newGraphFactors;
+
+        gtsam::Values NewGraphValues;
+        graphValues = NewGraphValues;
+    }
+
+    void resetParams() {
+        lastImuT_imu = -1;
+        doneFirstOpt = false;
+        systemInitialized = false;
+    }
+
+    void odometryHandler(const nav_msgs::Odometry::ConstPtr &odomMsg) {
+        std::lock_guard<std::mutex> lock(mtx);
+        odomPose.pose = odomMsg->pose;
+//        std::cout << "--------------------------------------------" << std::endl;
+//        ROS_INFO("pose get", odomPose.pose.pose.position.x,
+//                 odomPose.pose.pose.position.y,
+//                 odomPose.pose.pose.position.z
+//                 );
+        double currentCorrectionTime = ROS_TIME(odomMsg);
+
+        // make sure we have imu data to integrate
+        if (imuQueOpt.empty())
+            return;
+
+        float p_x = odomMsg->pose.pose.position.x;
+        float p_y = odomMsg->pose.pose.position.y;
+        float p_z = odomMsg->pose.pose.position.z;
+        float r_x = odomMsg->pose.pose.orientation.x;
+        float r_y = odomMsg->pose.pose.orientation.y;
+        float r_z = odomMsg->pose.pose.orientation.z;
+        float r_w = odomMsg->pose.pose.orientation.w;
+        bool degenerate = (int) odomMsg->pose.covariance[0] == 1 ? true : false;
+        gtsam::Pose3 lidarPose = gtsam::Pose3(gtsam::Rot3::Quaternion(r_w, r_x, r_y, r_z),
+                                              gtsam::Point3(p_x, p_y, p_z));
+
+
+        // 0. initialize system
+        if (!systemInitialized) {
+            resetOptimization();
+
+            // pop old IMU message
+            while (!imuQueOpt.empty()) {
+                if (ROS_TIME(&imuQueOpt.front()) < currentCorrectionTime - delta_t) {
+                    lastImuT_opt = ROS_TIME(&imuQueOpt.front());
+                    imuQueOpt.pop_front();
+                } else
+                    break;
+            }
+            // initial pose
+            prevPose_ = lidarPose.compose(lidar2Imu);
+            gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, priorPoseNoise);
+            graphFactors.add(priorPose);
+            // initial velocity
+            prevVel_ = gtsam::Vector3(0, 0, 0);
+            gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, priorVelNoise);
+            graphFactors.add(priorVel);
+            // initial bias
+            prevBias_ = gtsam::imuBias::ConstantBias();
+            gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, priorBiasNoise);
+            graphFactors.add(priorBias);
+            // add values
+            graphValues.insert(X(0), prevPose_);
+            graphValues.insert(V(0), prevVel_);
+            graphValues.insert(B(0), prevBias_);
+            // optimize once
+            optimizer.update(graphFactors, graphValues);
+            graphFactors.resize(0);
+            graphValues.clear();
+
+            imuIntegratorImu_->resetIntegrationAndSetBias(prevBias_);
+            imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
+
+            key = 1;
+            systemInitialized = true;
+            return;
+        }
+
+
+        // reset graph for speed
+        if (key == 100) {
+            // get updated noise before reset
+            gtsam::noiseModel::Gaussian::shared_ptr updatedPoseNoise = gtsam::noiseModel::Gaussian::Covariance(
+                    optimizer.marginalCovariance(X(key - 1)));
+            gtsam::noiseModel::Gaussian::shared_ptr updatedVelNoise = gtsam::noiseModel::Gaussian::Covariance(
+                    optimizer.marginalCovariance(V(key - 1)));
+            gtsam::noiseModel::Gaussian::shared_ptr updatedBiasNoise = gtsam::noiseModel::Gaussian::Covariance(
+                    optimizer.marginalCovariance(B(key - 1)));
+            // reset graph
+            resetOptimization();
+            // add pose
+            gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, updatedPoseNoise);
+            graphFactors.add(priorPose);
+            // add velocity
+            gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, updatedVelNoise);
+            graphFactors.add(priorVel);
+            // add bias
+            gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, updatedBiasNoise);
+            graphFactors.add(priorBias);
+            // add values
+            graphValues.insert(X(0), prevPose_);
+            graphValues.insert(V(0), prevVel_);
+            graphValues.insert(B(0), prevBias_);
+            // optimize once
+            optimizer.update(graphFactors, graphValues);
+            graphFactors.resize(0);
+            graphValues.clear();
+
+            key = 1;
+        }
+
+
+        // 1. integrate imu data and optimize
+        while (!imuQueOpt.empty()) {
+            // pop and integrate imu data that is between two optimizations
+            sensor_msgs::Imu *thisImu = &imuQueOpt.front();
+            double imuTime = ROS_TIME(thisImu);
+            if (imuTime < currentCorrectionTime - delta_t) {
+                double dt = (lastImuT_opt < 0) ? (1.0 / 100.0) : (imuTime - lastImuT_opt);
+                dt = 0.01; // overridden: a fixed 100 Hz IMU period is assumed instead of the measured gap
+//                ROS_INFO("info Time %f %f", imuTime, lastImuT_opt);
+                imuIntegratorOpt_->integrateMeasurement(
+                        gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y,
+                                       thisImu->linear_acceleration.z),
+                        gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y,
+                                       thisImu->angular_velocity.z), dt);
+
+                lastImuT_opt = imuTime;
+                imuQueOpt.pop_front();
+            } else
+                break;
+        }
+        // add imu factor to graph
+        const gtsam::PreintegratedImuMeasurements &preint_imu = dynamic_cast<const gtsam::PreintegratedImuMeasurements &>(*imuIntegratorOpt_);
+        gtsam::ImuFactor imu_factor(X(key - 1), V(key - 1), X(key), V(key), B(key - 1), preint_imu);
+        graphFactors.add(imu_factor);
+        // add imu bias between factor
+        graphFactors.add(
+                gtsam::BetweenFactor<gtsam::imuBias::ConstantBias>(B(key - 1), B(key), gtsam::imuBias::ConstantBias(),
+                                                                   gtsam::noiseModel::Diagonal::Sigmas(
+                                                                           sqrt(imuIntegratorOpt_->deltaTij()) *
+                                                                           noiseModelBetweenBias)));
+        // add pose factor
+        gtsam::Pose3 curPose = lidarPose.compose(lidar2Imu);
+        gtsam::PriorFactor<gtsam::Pose3> pose_factor(X(key), curPose, degenerate ? correctionNoise2 : correctionNoise);
+        graphFactors.add(pose_factor);
+        // insert predicted values
+        gtsam::NavState propState_ = imuIntegratorOpt_->predict(prevState_, prevBias_);
+        graphValues.insert(X(key), propState_.pose());
+        graphValues.insert(V(key), propState_.v());
+        graphValues.insert(B(key), prevBias_);
+        // optimize
+        optimizer.update(graphFactors, graphValues);
+        optimizer.update();
+        graphFactors.resize(0);
+        graphValues.clear();
+        // Overwrite the beginning of the preintegration for the next step.
+        gtsam::Values result = optimizer.calculateEstimate();
+        prevPose_ = result.at<gtsam::Pose3>(X(key));
+        prevVel_ = result.at<gtsam::Vector3>(V(key));
+        prevState_ = gtsam::NavState(prevPose_, prevVel_);
+        prevBias_ = result.at<gtsam::imuBias::ConstantBias>(B(key));
+        // Reset the optimization preintegration object.
+        imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
+        // check optimization
+        if (failureDetection(prevVel_, prevBias_)) {
+            resetParams();
+            return;
+        }
+
+
+        // 2. after optimization, re-propagate imu odometry preintegration
+        prevStateOdom = prevState_;
+        prevBiasOdom = prevBias_;
+        // first pop imu message older than current correction data
+        double lastImuQT = -1;
+        while (!imuQueImu.empty() && ROS_TIME(&imuQueImu.front()) < currentCorrectionTime - delta_t) {
+            lastImuQT = ROS_TIME(&imuQueImu.front());
+            imuQueImu.pop_front();
+        }
+        // re-propagate
+        if (!imuQueImu.empty()) {
+            // reset the bias using the newly optimized bias
+            imuIntegratorImu_->resetIntegrationAndSetBias(prevBiasOdom);
+            // integrate imu message from the beginning of this optimization
+            for (int i = 0; i < (int) imuQueImu.size(); ++i) {
+                sensor_msgs::Imu *thisImu = &imuQueImu[i];
+                double imuTime = ROS_TIME(thisImu);
+                double dt = (lastImuQT < 0) ? (1.0 / 100.0) : (imuTime - lastImuQT);
+                dt = 0.01; // overridden: a fixed 100 Hz IMU period is assumed instead of the measured gap
+                imuIntegratorImu_->integrateMeasurement(
+                        gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y,
+                                       thisImu->linear_acceleration.z),
+                        gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y,
+                                       thisImu->angular_velocity.z), dt);
+                lastImuQT = imuTime;
+            }
+        }
+
+        ++key;
+        doneFirstOpt = true;
+    }
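+
+    // Per correction (one incremental lidar odometry message) the graph receives, with key k:
+    //   ImuFactor(X(k-1), V(k-1), X(k), V(k), B(k-1))   from the preintegrated IMU measurements
+    //   BetweenFactor on B(k-1) -> B(k)                 bias random walk, sigmas scaled by sqrt(deltaTij)
+    //   PriorFactor on X(k) = lidarPose * lidar2Imu     loosened (correctionNoise2) when degenerate
+    // ISAM2 is updated incrementally, and every 100 keys the graph is rebuilt from the marginal
+    // covariances of the last state to keep the optimization cheap.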
+
+    bool failureDetection(const gtsam::Vector3 &velCur, const gtsam::imuBias::ConstantBias &biasCur) {
+        Eigen::Vector3f vel(velCur.x(), velCur.y(), velCur.z());
+        if (vel.norm() > 30) {
+            ROS_WARN("Large velocity, reset IMU-preintegration!");
+            return true;
+        }
+
+        Eigen::Vector3f ba(biasCur.accelerometer().x(), biasCur.accelerometer().y(), biasCur.accelerometer().z());
+        Eigen::Vector3f bg(biasCur.gyroscope().x(), biasCur.gyroscope().y(), biasCur.gyroscope().z());
+        if (ba.norm() > 1.0 || bg.norm() > 1.0) {
+            ROS_WARN("Large bias, reset IMU-preintegration!");
+            return true;
+        }
+
+        return false;
+    }
+
+    void odomHandler(const nav_msgs::Odometry::ConstPtr &msg) {
+        std::lock_guard<std::mutex> lock(mtx);
+        static double timePrev{0};
+        static bool initPose{true};
+
+        if (initPose) {
+            initPose = false;
+            odomPose.pose.pose.position.x = 0;
+            odomPose.pose.pose.position.y = 0;
+            geometry_msgs::Quaternion odom_quat = tf::createQuaternionMsgFromYaw(0);
+            odomPose.pose.pose.orientation = odom_quat;
+            odomPose.header.frame_id = odometryFrame;
+            odomPose.child_frame_id = "odom_imu";
+        }
+
+        auto time = ROS_TIME(msg);
+        auto dt = time - timePrev;
+        bool firstMsg = (timePrev == 0); // skip integration for the first message, where dt would be meaningless
+        timePrev = time;
+        if (!firstMsg) {
+            double yaw = tf::getYaw(odomPose.pose.pose.orientation);
+            odomPose.header.stamp = msg->header.stamp;
+            yaw += msg->twist.twist.angular.z * dt;
+            odomPose.pose.pose.position.x += (msg->twist.twist.linear.x) * cos(yaw) * dt;
+            odomPose.pose.pose.position.y += (msg->twist.twist.linear.x) * sin(yaw) * dt;
+            geometry_msgs::Quaternion odom_quat = tf::createQuaternionMsgFromYaw(yaw);
+            odomPose.pose.pose.orientation = odom_quat;
+            odomPose.header.frame_id = odometryFrame;
+            odomPose.child_frame_id = "odom_imu";
+        }
+//        ROS_INFO("pose %f %f %f",
+//                 odomPose.pose.pose.position.x,
+//                 odomPose.pose.pose.position.y,
+//                 odomPose.pose.pose.position.z
+//                 );
+    }
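+
+    // Dead-reckoning model used above (planar unicycle driven by the wheel-odometry twist):
+    //   yaw_{k+1} = yaw_k + w_z * dt
+    //   x_{k+1}   = x_k + v * cos(yaw_{k+1}) * dt
+    //   y_{k+1}   = y_k + v * sin(yaw_{k+1}) * dt      (v = twist.linear.x, forward velocity)
+    // The result is kept in odomPose and later substituted as the position of the published IMU odometry.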
+
+    void imuHandler(const sensor_msgs::Imu::ConstPtr &imu_raw) {
+        std::lock_guard<std::mutex> lock(mtx);
+        // convert the raw IMU data into the lidar frame
+        sensor_msgs::Imu thisImu = imuConverter(*imu_raw);
+        imuQueOpt.push_back(thisImu);
+        imuQueImu.push_back(thisImu);
+        tf::Quaternion quat;
+        tf::quaternionMsgToTF(thisImu.orientation, quat);
+        double roll, pitch, yaw; // holders for roll/pitch/yaw
+        tf::Matrix3x3(quat).getRPY(roll, pitch, yaw); // convert the orientation quaternion to roll/pitch/yaw
+//        ROS_INFO("info %f %f %f", roll,pitch,yaw);
+        if (!doneFirstOpt)
+            return;
+
+        double imuTime = ROS_TIME(&thisImu);
+        double dt = (lastImuT_imu < 0) ? (1.0 / 100.0) : (imuTime - lastImuT_imu);
+        dt = 0.01; // overridden: a fixed 100 Hz IMU period is assumed instead of the measured gap
+        lastImuT_imu = imuTime;
+
+        // integrate this single imu message
+        imuIntegratorImu_->integrateMeasurement(
+                gtsam::Vector3(thisImu.linear_acceleration.x, thisImu.linear_acceleration.y,
+                               thisImu.linear_acceleration.z),
+                gtsam::Vector3(thisImu.angular_velocity.x, thisImu.angular_velocity.y, thisImu.angular_velocity.z), dt);
+
+        // predict odometry
+        gtsam::NavState currentState = imuIntegratorImu_->predict(prevStateOdom, prevBiasOdom);
+
+        // publish odometry
+        nav_msgs::Odometry odometry;
+        odometry.header.stamp = thisImu.header.stamp;
+        odometry.header.frame_id = odometryFrame;
+        odometry.child_frame_id = "odom_imu";
+        // transform imu pose to lidar
+        gtsam::Pose3 imuPose = gtsam::Pose3(currentState.quaternion(), currentState.position());
+        gtsam::Pose3 lidarPose = imuPose.compose(imu2Lidar);
+//        odometry.pose.pose.position.x = lidarPose.translation().x();
+//        odometry.pose.pose.position.y = lidarPose.translation().y();
+//        odometry.pose.pose.position.z = lidarPose.translation().z();
+        odometry.pose.pose.position = odomPose.pose.pose.position;
+        odometry.pose.pose.orientation.x = lidarPose.rotation().toQuaternion().x();
+        odometry.pose.pose.orientation.y = lidarPose.rotation().toQuaternion().y();
+        odometry.pose.pose.orientation.z = lidarPose.rotation().toQuaternion().z();
+        odometry.pose.pose.orientation.w = lidarPose.rotation().toQuaternion().w();
+//        ROS_INFO("info angle %f", tf::getYaw(odometry.pose.pose.orientation));
+        odometry.twist.twist.linear.x = currentState.velocity().x();
+        odometry.twist.twist.linear.y = currentState.velocity().y();
+        odometry.twist.twist.linear.z = currentState.velocity().z();
+        odometry.twist.twist.angular.x = thisImu.angular_velocity.x + prevBiasOdom.gyroscope().x();
+        odometry.twist.twist.angular.y = thisImu.angular_velocity.y + prevBiasOdom.gyroscope().y();
+        odometry.twist.twist.angular.z = thisImu.angular_velocity.z + prevBiasOdom.gyroscope().z();
+        pubImuOdometry.publish(odometry);
+    }
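+
+    // Note on the odometry published above: the position is taken from the dead-reckoned wheel-odometry
+    // pose (odomPose) rather than from the preintegrated IMU state (the lidarPose position lines are
+    // commented out), while the orientation comes from the preintegrated state and the angular velocity is
+    // the bias-corrected gyro. This appears to be a customization of this fork rather than stock LIO-SAM.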
+};
+
+
+int main(int argc, char **argv) {
+    ros::init(argc, argv, "roboat_loam");
+
+    IMUPreintegration ImuP;
+
+    TransformFusion TF;
+
+    ROS_INFO("\033[1;32m----> IMU Preintegration Started.\033[0m");
+
+    ros::MultiThreadedSpinner spinner(4);
+    spinner.spin();
+
+    return 0;
+}

+ 1525 - 0
src/localization.cpp

@@ -0,0 +1,1525 @@
+#include "utility.h"
+#include "lio_sam/cloud_info.h"
+
+#include <gtsam/geometry/Rot3.h>
+#include <gtsam/geometry/Pose3.h>
+#include <gtsam/slam/PriorFactor.h>
+#include <gtsam/slam/BetweenFactor.h>
+#include <gtsam/navigation/GPSFactor.h>
+#include <gtsam/navigation/ImuFactor.h>
+#include <gtsam/navigation/CombinedImuFactor.h>
+#include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
+#include <gtsam/nonlinear/Marginals.h>
+#include <gtsam/nonlinear/Values.h>
+#include <gtsam/inference/Symbol.h>
+
+#include <gtsam/nonlinear/ISAM2.h>
+#include <jsoncpp/json/json.h>
+#include <fstream>
+#include <geometry_msgs/PoseWithCovarianceStamped.h>
+#include <pcl/registration/ndt.h>
+#include "rapidjson/document.h"
+#include "rapidjson/writer.h"
+#include "rapidjson/reader.h"
+
+using namespace gtsam;
+
+using symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
+using symbol_shorthand::V; // Vel   (xdot,ydot,zdot)
+using symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)
+using symbol_shorthand::G; // GPS pose
+
+/*
+    * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw] intensity is time stamp)
+    */
+struct PointXYZIRPYT {
+    PCL_ADD_POINT4D
+
+    PCL_ADD_INTENSITY;                  // preferred way of adding a XYZ+padding
+    float roll;
+    float pitch;
+    float yaw;
+    double time;
+
+    EIGEN_MAKE_ALIGNED_OPERATOR_NEW   // make sure our new allocators are aligned
+} EIGEN_ALIGN16;                    // enforce SSE padding for correct memory alignment
+
+POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,
+                                   (float, x, x)(float, y, y)
+                                           (float, z, z)(float, intensity, intensity)
+                                           (float, roll, roll)(float, pitch, pitch)(float, yaw, yaw)
+                                           (double, time, time))
+
+typedef PointXYZIRPYT PointTypePose;
+
+
+class mapOptimization : public ParamServer {
+
+public:
+
+    // gtsam
+    NonlinearFactorGraph gtSAMgraph;
+    Values initialEstimate;
+    ISAM2 *isam;
+    Values isamCurrentEstimate;
+    Eigen::MatrixXd poseCovariance;
+
+    ros::Publisher pubLaserCloudSurround;
+    ros::Publisher pubLaserOdometryGlobal;
+    ros::Publisher pubLaserOdometryIncremental;
+    ros::Publisher pubKeyPoses;
+    ros::Publisher pubPath;
+    ros::Publisher pubPathPoint;
+    ros::Publisher surfPub;
+    ros::Publisher cornerPub;
+
+    ros::Publisher pubHistoryKeyFrames;
+    ros::Publisher pubIcpKeyFrames;
+    ros::Publisher pubRecentKeyFrames;
+    ros::Publisher pubRecentKeyFrame;
+    ros::Publisher pubCloudRegisteredRaw;
+    ros::Publisher pubLoopConstraintEdge;
+
+    ros::Subscriber subCloud;
+    ros::Subscriber subInitPose;
+    ros::Subscriber subGPS;
+
+    std::deque<nav_msgs::Odometry> gpsQueue;
+    lio_sam::cloud_info cloudInfo;
+    PointTypePose initPose6D;
+    PointType initPose3D;
+    vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;
+    vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;
+
+    pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;
+    pcl::PointCloud<PointType>::Ptr localKeyPose3D;
+    pcl::PointCloud<PointTypePose>::Ptr localKeyPose6D;
+    pcl::PointCloud<PointType>::Ptr localizationPath;
+    pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D;
+    pcl::PointCloud<PointType>::Ptr copy_cloudKeyPoses3D;
+    pcl::PointCloud<PointTypePose>::Ptr copy_cloudKeyPoses6D;
+
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odoOptimization
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odoOptimization
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner feature set from odoOptimization
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf feature set from odoOptimization
+
+    pcl::PointCloud<PointType>::Ptr laserCloudOri;
+    pcl::PointCloud<PointType>::Ptr coeffSel;
+
+    std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation
+    std::vector<PointType> coeffSelCornerVec;
+    std::vector<bool> laserCloudOriCornerFlag;
+    std::vector<PointType> laserCloudOriSurfVec; // surf point holder for parallel computation
+    std::vector<PointType> coeffSelSurfVec;
+    std::vector<bool> laserCloudOriSurfFlag;
+
+    map<int, pair<pcl::PointCloud<PointType>, pcl::PointCloud<PointType>>> laserCloudMapContainer;
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap;
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;
+
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;
+
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;
+
+    pcl::VoxelGrid<PointType> downSizeFilterCorner;
+    pcl::VoxelGrid<PointType> downSizeFilterSurf;
+    pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization
+
+    ros::Time timeLaserInfoStamp;
+    double timeLaserInfoCur;
+
+    float transformTobeMapped[6];
+
+    std::mutex mtx;
+    std::mutex mtxLoopInfo;
+
+    bool isDegenerate = false;
+    Eigen::Matrix<float, 6, 6> matP;
+
+    int laserCloudCornerFromMapDSNum = 0;
+    int laserCloudSurfFromMapDSNum = 0;
+    int laserCloudCornerLastDSNum = 0;
+    int laserCloudSurfLastDSNum = 0;
+    unsigned int mapFrameSize{0};
+    bool aLoopIsClosed = false;
+    bool getInitPose{false}; // get initPose to localization
+    map<int, int> loopIndexContainer; // from new to old
+    vector<pair<int, int>> loopIndexQueue;
+    vector<gtsam::Pose3> loopPoseQueue;
+    vector<gtsam::noiseModel::Diagonal::shared_ptr> loopNoiseQueue;
+    deque<std_msgs::Float64MultiArray> loopInfoVec;
+
+    nav_msgs::Path globalPath;
+
+    Eigen::Affine3f transPointAssociateToMap;
+    Eigen::Affine3f incrementalOdometryAffineFront;
+    Eigen::Affine3f incrementalOdometryAffineBack;
+
+
+    mapOptimization() {
+        ISAM2Params parameters;
+        parameters.relinearizeThreshold = 0.1;
+        parameters.relinearizeSkip = 1;
+        isam = new ISAM2(parameters);
+
+        surfPub = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/trajectory", 1);
+        cornerPub = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/trajectory", 1);
+        pubKeyPoses = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/trajectoryPath", 1);
+        pubPathPoint = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/pathPoint", 1);
+        pubLaserCloudSurround = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_global", 1);
+        pubLaserOdometryGlobal = nh.advertise<nav_msgs::Odometry>("lio_sam/mapping/odometry", 1);
+        pubLaserOdometryIncremental = nh.advertise<nav_msgs::Odometry>("lio_sam/mapping/odometry_incremental", 1);
+        pubPath = nh.advertise<nav_msgs::Path>("lio_sam/mapping/path", 1);
+
+        subCloud = nh.subscribe<lio_sam::cloud_info>("lio_sam/feature/cloud_info", 1,
+                                                     &mapOptimization::laserCloudInfoHandler, this,
+                                                     ros::TransportHints().tcpNoDelay());
+        subInitPose = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("initialpose", 1,
+                                                                             &mapOptimization::initialPoseHandler, this,
+                                                                             ros::TransportHints().tcpNoDelay());
+        subGPS = nh.subscribe<nav_msgs::Odometry>(gpsTopic, 200, &mapOptimization::gpsHandler, this,
+                                                  ros::TransportHints().tcpNoDelay());
+
+        pubRecentKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_local", 1);
+        pubRecentKeyFrame = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered", 1);
+        pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered_raw", 1);
+
+        downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);
+        downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
+        downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity,
+                                                      surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization
+
+        allocateMemory();
+        mapPath = std::getenv("HOME") + mapPath;
+        auto jsonFile = mapPath + "map.json";
+        loadKeyFrames(jsonFile);
+
+    }
+
+    void allocateMemory() {
+        localizationPath.reset(new pcl::PointCloud<PointType>());
+        localKeyPose3D.reset(new pcl::PointCloud<PointType>());
+        localKeyPose6D.reset(new pcl::PointCloud<PointTypePose>());
+        cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
+        cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
+        copy_cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
+        copy_cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
+
+        kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
+        kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
+
+        laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odoOptimization
+        laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odoOptimization
+        laserCloudCornerLastDS.reset(
+                new pcl::PointCloud<PointType>()); // downsampled corner feature set from odoOptimization
+        laserCloudSurfLastDS.reset(
+                new pcl::PointCloud<PointType>()); // downsampled surf feature set from odoOptimization
+
+        laserCloudOri.reset(new pcl::PointCloud<PointType>());
+        coeffSel.reset(new pcl::PointCloud<PointType>());
+
+        laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);
+        coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);
+        laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);
+        laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);
+        coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);
+        laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);
+
+        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
+        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
+
+        laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());
+        laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());
+        laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());
+        laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());
+
+        kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());
+        kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());
+
+        for (int i = 0; i < 6; ++i) {
+            transformTobeMapped[i] = 0;
+        }
+
+        matP.setZero();
+    }
+
+    void initialPoseHandler(const geometry_msgs::PoseWithCovarianceStampedConstPtr &msg) {
+        initPose6D.x = msg->pose.pose.position.x;
+        initPose6D.y = msg->pose.pose.position.y;
+        initPose6D.z = msg->pose.pose.position.z;
+
+        initPose3D.x = initPose6D.x;
+        initPose3D.y = initPose6D.y;
+        initPose3D.z = initPose6D.z;
+
+        tf::Quaternion quaternion;
+        tf::quaternionMsgToTF(msg->pose.pose.orientation, quaternion);
+        double roll, pitch, yaw;
+        tf::Matrix3x3(quaternion).getRPY(roll, pitch, yaw);
+        initPose6D.roll = roll;
+        initPose6D.pitch = pitch;
+        initPose6D.yaw = yaw;
+//        localKeyPose6D->push_back(initPose6D);
+//        localKeyPose3D->push_back(initPose3D);
+        ROS_INFO("get initPose %f %f %f | %f %f %f",
+                 initPose6D.x,
+                 initPose6D.y,
+                 initPose6D.z,
+                 initPose6D.roll,
+                 initPose6D.pitch,
+                 initPose6D.yaw);
+        getInitPose = true;
+    }
+
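+    // Feature-cloud callback: caches the corner/surf clouds from featureExtraction and, once an initial pose is
+    // available, runs one localization cycle every mappingProcessInterval seconds:
+    // updateInitialGuess -> extractSurroundingKeyFrames -> downsampleCurrentScan -> scan2MapOptimization
+    // -> saveKeyFramesAndFactor -> publishOdometry -> publishFrames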
+    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr &msgIn) {
+        // extract time stamp
+        timeLaserInfoStamp = msgIn->header.stamp;
+        timeLaserInfoCur = msgIn->header.stamp.toSec();
+        // extract info and feature cloud
+        cloudInfo = *msgIn;
+        pcl::fromROSMsg(msgIn->cloud_corner, *laserCloudCornerLast);
+        pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);
+
+        std::lock_guard<std::mutex> lock(mtx);
+
+        static double timeLastProcessing = -1;
+        // process only when (current scan time - last processing time) exceeds the mapping interval and an initial pose is available
+        double start = ros::Time::now().toSec();
+        if (timeLaserInfoCur - timeLastProcessing >= mappingProcessInterval and getInitPose) {
+
+            // record the current time for the next interval check
+            timeLastProcessing = timeLaserInfoCur;
+            // update the initial pose guess
+            updateInitialGuess();
+            if (!extractSurroundingKeyFrames()) {
+                timeLastProcessing = -1;
+                return;
+            }
+            downsampleCurrentScan();
+            scan2MapOptimization();
+
+            saveKeyFramesAndFactor();
+
+            publishOdometry();
+
+            publishFrames();
+            double end = ros::Time::now().toSec();
+        }
+
+    }
+
+    void gpsHandler(const nav_msgs::Odometry::ConstPtr &gpsMsg) {
+        gpsQueue.push_back(*gpsMsg);
+    }
+
+    void pointAssociateToMap(PointType const *const pi, PointType *const po) {
+        // transform a point from the current scan frame into the map frame
+        po->x = transPointAssociateToMap(0, 0) * pi->x + transPointAssociateToMap(0, 1) * pi->y +
+                transPointAssociateToMap(0, 2) * pi->z + transPointAssociateToMap(0, 3);
+        po->y = transPointAssociateToMap(1, 0) * pi->x + transPointAssociateToMap(1, 1) * pi->y +
+                transPointAssociateToMap(1, 2) * pi->z + transPointAssociateToMap(1, 3);
+        po->z = transPointAssociateToMap(2, 0) * pi->x + transPointAssociateToMap(2, 1) * pi->y +
+                transPointAssociateToMap(2, 2) * pi->z + transPointAssociateToMap(2, 3);
+        po->intensity = pi->intensity;
+    }
+
+    // transform a point cloud into the target frame given a 6-DoF pose
+    pcl::PointCloud<PointType>::Ptr
+    transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose *transformIn) {
+        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
+
+
+        int cloudSize = cloudIn->size();
+        cloudOut->resize(cloudSize);
+        // build the affine transform from the key-frame pose
+        Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z,
+                                                          transformIn->roll, transformIn->pitch, transformIn->yaw);
+
+#pragma omp parallel for num_threads(numberOfCores) // parallelize the loop over the configured number of cores
+        for (int i = 0; i < cloudSize; ++i) {
+            pointFrom = &cloudIn->points[i];
+            cloudOut->points[i].x =
+                    transCur(0, 0) * pointFrom->x + transCur(0, 1) * pointFrom->y + transCur(0, 2) * pointFrom->z +
+                    transCur(0, 3);
+            cloudOut->points[i].y =
+                    transCur(1, 0) * pointFrom->x + transCur(1, 1) * pointFrom->y + transCur(1, 2) * pointFrom->z +
+                    transCur(1, 3);
+            cloudOut->points[i].z =
+                    transCur(2, 0) * pointFrom->x + transCur(2, 1) * pointFrom->y + transCur(2, 2) * pointFrom->z +
+                    transCur(2, 3);
+            cloudOut->points[i].intensity = pointFrom->intensity;
+        }
+        return cloudOut;
+    }
+
+    gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint) {
+        // convert a 6-DoF key-frame pose to a gtsam::Pose3
+        return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)),
+                            gtsam::Point3(double(thisPoint.x), double(thisPoint.y), double(thisPoint.z)));
+    }
+
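+    // The 6-element transform arrays used throughout this class are ordered [roll, pitch, yaw, x, y, z].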
+    gtsam::Pose3 trans2gtsamPose(float transformIn[]) {
+        // convert a [roll, pitch, yaw, x, y, z] transform array to a gtsam::Pose3
+        return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]),
+                            gtsam::Point3(transformIn[3], transformIn[4], transformIn[5]));
+    }
+
+    Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint) {
+        // convert a 6-DoF key-frame pose to an Eigen affine transform
+        return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll, thisPoint.pitch,
+                                      thisPoint.yaw);
+    }
+
+    Eigen::Affine3f trans2Affine3f(float transformIn[]) {
+        return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1],
+                                      transformIn[2]);
+    }
+
+    PointTypePose trans2PointTypePose(float transformIn[]) {
+        PointTypePose thisPose6D;
+        thisPose6D.x = transformIn[3];
+        thisPose6D.y = transformIn[4];
+        thisPose6D.z = transformIn[5];
+        thisPose6D.roll = transformIn[0];
+        thisPose6D.pitch = transformIn[1];
+        thisPose6D.yaw = transformIn[2];
+        return thisPose6D;
+    }
+
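+    // Coarse relocalization: align the current surf scan (local) to the surf submap (map) with NDT, seeded by the
+    // user-supplied initial pose (only yaw and translation enter the initial guess; roll/pitch are assumed small).
+    // The result initializes transformTobeMapped; if the fitness score is worse than ndtMinScore the initial pose
+    // is rejected and the user has to provide it again.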
+    bool
+    ndtMatch(pcl::PointCloud<PointType>::Ptr map, pcl::PointCloud<PointType>::Ptr local, PointTypePose initPose) {
+        // initialize the Normal Distributions Transform (NDT)
+        pcl::NormalDistributionsTransform<PointType, PointType> ndt;
+        // set the scale-dependent NDT parameters
+        // minimum transformation difference for the termination condition
+        ndt.setTransformationEpsilon(0.01);
+        // maximum step size for the More-Thuente line search
+        ndt.setStepSize(0.1);
+        // resolution of the NDT grid structure (VoxelGridCovariance)
+        ndt.setResolution(1.5);
+        // maximum number of registration iterations
+        ndt.setMaximumIterations(35);
+        // cloud to be registered
+        ndt.setInputSource(local);
+        // registration target
+        ndt.setInputTarget(map);
+        Eigen::AngleAxisf init_rotation(initPose.yaw, Eigen::Vector3f::UnitZ());
+        Eigen::Translation3f init_translation(initPose.x, initPose.y, initPose.z);
+        Eigen::Matrix4f init_guess = (init_translation * init_rotation).matrix();
+        pcl::PointCloud<PointType>::Ptr output_cloud(new pcl::PointCloud<PointType>);
+        ndt.align(*output_cloud, init_guess);
+        ROS_INFO_STREAM("Normal Distributions Transform has converged:" << ndt.hasConverged()
+                                                                        << " score: " << ndt.getFitnessScore()
+                                                                        << std::endl);
+        Eigen::Transform<float, 3, Eigen::Affine> transFinal(ndt.getFinalTransformation());
+
+
+        pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4],
+                                          transformTobeMapped[5],
+                                          transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
+
+        static bool pcdDirResolved = false;
+        if (!pcdDirResolved) { // resolve under $HOME only once; ndtMatch may run again if the initial pose is rejected
+            savePCDDirectory = std::getenv("HOME") + savePCDDirectory;
+            pcdDirResolved = true;
+        }
+        pcl::io::savePCDFileASCII(savePCDDirectory + "map.pcd", *map);
+        pcl::io::savePCDFileASCII(savePCDDirectory + "local.pcd", *local);
+
+        ROS_INFO("score is %f", ndt.getFitnessScore());
+        ROS_INFO("info ndt %f %f %f %f %f %f",
+                 transformTobeMapped[0],
+                 transformTobeMapped[1],
+                 transformTobeMapped[2],
+                 transformTobeMapped[3],
+                 transformTobeMapped[4],
+                 transformTobeMapped[5]
+        );
+        if (ndt.getFitnessScore() > ndtMinScore) {
+            ROS_WARN("initPose error %f, please input initPose again", ndt.getFitnessScore());
+            getInitPose = false;
+            return false;
+        }
+
+        return true;
+    }
+
+    void visualizeGlobalMapThread() {
+
+        ros::Rate rate(0.2);
+        while (ros::ok()) {
+            publishGlobalMap();
+            rate.sleep();
+        }
+    }
+
+    void publishGlobalMap() {
+        if (pubLaserCloudSurround.getNumSubscribers() == 0)
+            return;
+
+        if (cloudKeyPoses3D->points.empty())
+            return;
+
+        pcl::KdTreeFLANN<PointType>::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyPoses(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyPosesDS(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyFrames(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyFramesDS(new pcl::PointCloud<PointType>());
+
+        // kd-tree to find near key frames to visualize
+        std::vector<int> pointSearchIndGlobalMap;
+        std::vector<float> pointSearchSqDisGlobalMap;
+        // search near key frames to visualize
+        mtx.lock();
+        kdtreeGlobalMap->setInputCloud(cloudKeyPoses3D);
+        kdtreeGlobalMap->radiusSearch(cloudKeyPoses3D->front(), globalMapVisualizationSearchRadius,
+                                      pointSearchIndGlobalMap, pointSearchSqDisGlobalMap, 0);
+        mtx.unlock();
+        for (int i = 0; i < (int) pointSearchIndGlobalMap.size(); ++i)
+            globalMapKeyPoses->push_back(cloudKeyPoses3D->points[pointSearchIndGlobalMap[i]]);
+        // downsample near selected key frames
+        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyPoses; // for global map visualization
+        downSizeFilterGlobalMapKeyPoses.setLeafSize(globalMapVisualizationPoseDensity,
+                                                    globalMapVisualizationPoseDensity,
+                                                    globalMapVisualizationPoseDensity); // for global map visualization
+        downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);
+        downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);
+
+        // extract visualized and downsampled key frames
+        for (int i = 0; i < (int) globalMapKeyPosesDS->size(); ++i) {
+            if (pointDistance(globalMapKeyPosesDS->points[i], cloudKeyPoses3D->back()) >
+                globalMapVisualizationSearchRadius)
+                continue;
+            int thisKeyInd = (int) globalMapKeyPosesDS->points[i].intensity;
+            *globalMapKeyFrames += *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],
+                                                        &cloudKeyPoses6D->points[thisKeyInd]);
+            *globalMapKeyFrames += *transformPointCloud(surfCloudKeyFrames[thisKeyInd],
+                                                        &cloudKeyPoses6D->points[thisKeyInd]);
+        }
+        // downsample visualized points
+        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyFrames; // for global map visualization
+        downSizeFilterGlobalMapKeyFrames.setLeafSize(globalMapVisualizationLeafSize, globalMapVisualizationLeafSize,
+                                                     globalMapVisualizationLeafSize); // for global map visualization
+        downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);
+        downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);
+        publishCloud(&pubLaserCloudSurround, globalMapKeyFramesDS, timeLaserInfoStamp, mapFrame);
+    }
+
+    void updateInitialGuess() {
+        // save the current transformation before any processing (all zeros on the first call)
+
+        incrementalOdometryAffineFront = trans2Affine3f(transformTobeMapped);
+
+        static Eigen::Affine3f lastImuTransformation;
+        // initialization: on the very first scan, seed roll/pitch/yaw from the user-supplied initial pose (done only once)
+        if (localKeyPose3D->points.empty()) {
+            transformTobeMapped[0] = initPose6D.roll;
+            transformTobeMapped[1] = initPose6D.pitch;
+            transformTobeMapped[2] = initPose6D.yaw;
+            if (!useImuHeadingInitialization)
+                transformTobeMapped[2] = 0;
+
+            lastImuTransformation = pcl::getTransformation(0, 0, 0, initPose6D.roll, initPose6D.pitch,
+                                                           initPose6D.yaw); // save imu before return;
+            return;
+        }
+        // use imu pre-integration estimation for pose guess
+        static bool lastImuPreTransAvailable = false;
+        static Eigen::Affine3f lastImuPreTransformation;
+        // initial guess from the odom/imu_incremental odometry produced by imuPreintegration and relayed through imageProjection
+        // the early return below disables the odometry/IMU based guesses that follow; the pose is propagated by scan-to-map matching alone
+        return;
+        if (cloudInfo.odomAvailable == true) {
+            Eigen::Affine3f transBack = pcl::getTransformation(cloudInfo.initialGuessX, cloudInfo.initialGuessY,
+                                                               cloudInfo.initialGuessZ,
+                                                               cloudInfo.initialGuessRoll, cloudInfo.initialGuessPitch,
+                                                               cloudInfo.initialGuessYaw);
+            if (!lastImuPreTransAvailable) {
+                lastImuPreTransformation = transBack;
+                lastImuPreTransAvailable = true;
+            } else {
+                // increment = (previous transform)^-1 * current transform
+                Eigen::Affine3f transIncre = lastImuPreTransformation.inverse() * transBack;
+                Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
+                // compose the increment onto the current estimate and write it back into transformTobeMapped
+                Eigen::Affine3f transFinal = transTobe * transIncre;
+                pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4],
+                                                  transformTobeMapped[5],
+                                                  transformTobeMapped[0], transformTobeMapped[1],
+                                                  transformTobeMapped[2]);
+                // remember this transform for the next increment
+                lastImuPreTransformation = transBack;
+
+                lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit,
+                                                               cloudInfo.imuYawInit); // save imu before return;
+                return;
+            }
+        }
+
+        // use imu incremental estimation for pose guess (only rotation)
+        // incremental guess from the raw IMU attitude; unlike the block above, the source here is the raw IMU
+        // orientation (as in the initialization step) rather than the IMU-preintegration odometry
+        // only the rotation is updated; the steps mirror the block above
+        // (the practical benefit of having two guess sources is debatable)
+        // keep an eye on transformTobeMapped: it is the key state for everything that follows
+        if (cloudInfo.imuAvailable == true) {
+            Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit,
+                                                               cloudInfo.imuYawInit);
+            Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;
+
+            Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
+            Eigen::Affine3f transFinal = transTobe * transIncre;
+            pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4],
+                                              transformTobeMapped[5],
+                                              transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
+
+            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit,
+                                                           cloudInfo.imuYawInit); // save imu before return;
+            return;
+        }
+    }
+
+    bool extractNearby() {
+        // clouds holding the surrounding key poses and their downsampled version
+        pcl::PointCloud<PointType>::Ptr surroundingKeyPoses(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr surroundingKeyPosesDS(new pcl::PointCloud<PointType>());
+        std::vector<int> pointSearchInd;
+        std::vector<float> pointSearchSqDis;
+
+        // extract all the nearby key poses and downsample them
+        kdtreeSurroundingKeyPoses->setInputCloud(cloudKeyPoses3D); // create kd-tree of all key poses
+        if (localKeyPose3D->empty()) {
+            kdtreeSurroundingKeyPoses->radiusSearch(initPose3D, (double) surroundingKeyframeSearchRadius,
+                                                    pointSearchInd, pointSearchSqDis);
+        } else {
+            kdtreeSurroundingKeyPoses->radiusSearch(localKeyPose3D->back(), (double) surroundingKeyframeSearchRadius,
+                                                    pointSearchInd, pointSearchSqDis);
+        }
+        for (int i = 0; i < (int) pointSearchInd.size(); ++i) {
+            int id = pointSearchInd[i];
+            surroundingKeyPoses->push_back(cloudKeyPoses3D->points[id]);
+        }
+
+        downSizeFilterSurroundingKeyPoses.setInputCloud(surroundingKeyPoses);
+        downSizeFilterSurroundingKeyPoses.filter(*surroundingKeyPosesDS);
+
+        // also extract some latest key frames in case the robot rotates in one position
+        int numPoses = cloudKeyPoses3D->size();
+        for (int i = numPoses - 1; i >= 0; --i) {
+            if (timeLaserInfoCur - cloudKeyPoses6D->points[i].time < 10.0)
+                surroundingKeyPosesDS->push_back(cloudKeyPoses3D->points[i]);
+            else
+                break;
+        }
+
+        // build the corner/surf local map corresponding to the downsampled surrounding key poses
+        if (!extractCloud(surroundingKeyPosesDS))
+            return false;
+        return true;
+    }
+
+    bool extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract) {
+        // fuse the map
+        laserCloudCornerFromMap->clear();
+        laserCloudSurfFromMap->clear();
+        for (int i = 0; i < (int) cloudToExtract->size(); ++i) {
+            if (pointDistance(cloudToExtract->points[i], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)
+                continue;
+
+            int thisKeyInd = (int) cloudToExtract->points[i].intensity;
+            if (laserCloudMapContainer.find(thisKeyInd) != laserCloudMapContainer.end()) {
+                // transformed cloud available
+                *laserCloudCornerFromMap += laserCloudMapContainer[thisKeyInd].first;
+                *laserCloudSurfFromMap += laserCloudMapContainer[thisKeyInd].second;
+            } else {
+                // transformed cloud not available
+                pcl::PointCloud<PointType> laserCloudCornerTemp = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],
+                                                                                       &cloudKeyPoses6D->points[thisKeyInd]);
+                pcl::PointCloud<PointType> laserCloudSurfTemp = *transformPointCloud(surfCloudKeyFrames[thisKeyInd],
+                                                                                     &cloudKeyPoses6D->points[thisKeyInd]);
+                *laserCloudCornerFromMap += laserCloudCornerTemp;
+                *laserCloudSurfFromMap += laserCloudSurfTemp;
+                laserCloudMapContainer[thisKeyInd] = make_pair(laserCloudCornerTemp, laserCloudSurfTemp);
+            }
+
+        }
+        if (localKeyPose3D->empty()) {
+            if (!ndtMatch(laserCloudSurfFromMap, laserCloudSurfLast, initPose6D))
+                return false;
+        }
+        sensor_msgs::PointCloud2 surfPoint, cornerPoint;
+        pcl::toROSMsg(*laserCloudSurfFromMap, surfPoint);
+        surfPoint.header.frame_id = "map";
+        surfPub.publish(surfPoint);
+        pcl::toROSMsg(*laserCloudCornerFromMap, cornerPoint);
+        cornerPoint.header.frame_id = "map";
+        cornerPub.publish(cornerPoint);
+        // Downsample the surrounding corner key frames (or map)
+        downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);
+        downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);
+        laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();
+        // Downsample the surrounding surf key frames (or map)
+        downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
+        downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
+        laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();
+
+        // clear map cache if too large
+        if (laserCloudMapContainer.size() > 1000)
+            laserCloudMapContainer.clear();
+        return true;
+    }
+
+    bool extractSurroundingKeyFrames() {
+        if (cloudKeyPoses3D->points.empty())
+            return true;
+        if (!extractNearby())
+            return false;
+        return true;
+    }
+
+    void downsampleCurrentScan() {
+        // Downsample cloud from current scan
+        // the current scan is downsampled here, right after the surrounding key frames were downsampled above;
+        // all this downsampling simply sparsifies the clouds to speed up matching and keep the node real-time
+        laserCloudCornerLastDS->clear();
+        downSizeFilterCorner.setInputCloud(laserCloudCornerLast);
+        downSizeFilterCorner.filter(*laserCloudCornerLastDS);
+        laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();
+
+        laserCloudSurfLastDS->clear();
+        downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
+        downSizeFilterSurf.filter(*laserCloudSurfLastDS);
+        laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();
+    }
+
+    std::string readfile(const char *filename) {
+        std::ifstream infile;
+        infile.open(filename);   // attach the stream to the file
+        assert(infile.is_open());   // abort if the file could not be opened
+
+        std::string s;
+        std::string data;
+        while (getline(infile, s)) {
+            data += s;
+        }
+        infile.close();
+        return data;
+    }
+
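+    // Load the key-frame map from JSON. Expected layout (inferred from the fields parsed below):
+    // {
+    //   "name": "...", "frameSize": N,
+    //   "keyPose3D":  { "x": [...], "y": [...], "z": [...], "intensity": [...] },
+    //   "keyPose6D":  { "x": [...], "y": [...], "z": [...], "roll": [...], "pitch": [...], "yaw": [...],
+    //                   "intensity": [...], "time": [...] },
+    //   "cornerPointFrames": [ { "x": [...], "y": [...], "z": [...], "intensity": [...] }, ... ],
+    //   "surfPointFrames":   [ { "x": [...], "y": [...], "z": [...], "intensity": [...] }, ... ]
+    // }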
+    void loadKeyFrames(string mapPath) {
+        ROS_INFO("read map file!");
+        rapidjson::Document document;
+        auto jsonStr = readfile(mapPath.data());
+        if (document.Parse(jsonStr.data()).HasParseError()) {
+            std::cout << "error" << std::endl;
+            return;
+        }
+        ROS_INFO("read map success!");
+        auto name = document["name"].GetString();
+        mapFrameSize = document["frameSize"].GetInt();
+        ROS_INFO("map name %s, map size %d", name, mapFrameSize);
+        for (unsigned i = 0; i < document["keyPose3D"]["x"].Size(); i++) {
+            PointType thisPose3D;
+            thisPose3D.x = document["keyPose3D"]["x"][i].GetFloat();
+            thisPose3D.y = document["keyPose3D"]["y"][i].GetFloat();
+            thisPose3D.z = document["keyPose3D"]["z"][i].GetFloat();
+            thisPose3D.intensity = document["keyPose3D"]["intensity"][i].GetFloat();
+            cloudKeyPoses3D->push_back(thisPose3D);
+
+            PointTypePose thisPose6D;
+            thisPose6D.x = document["keyPose6D"]["x"][i].GetFloat();
+            thisPose6D.y = document["keyPose6D"]["y"][i].GetFloat();
+            thisPose6D.z = document["keyPose6D"]["z"][i].GetFloat();
+            thisPose6D.roll = document["keyPose6D"]["roll"][i].GetFloat();
+            thisPose6D.pitch = document["keyPose6D"]["pitch"][i].GetFloat();
+            thisPose6D.yaw = document["keyPose6D"]["yaw"][i].GetFloat();
+            thisPose6D.intensity = document["keyPose6D"]["intensity"][i].GetFloat();
+            thisPose6D.time = document["keyPose6D"]["time"][i].GetFloat();
+            cloudKeyPoses6D->push_back(thisPose6D);
+        }
+
+        for (unsigned i = 0; i < document["cornerPointFrames"].Size(); i++) {
+            pcl::PointCloud<PointType>::Ptr pointCloud(new pcl::PointCloud<PointType>());
+            for (unsigned j = 0; j < document["cornerPointFrames"][i]["x"].Size(); j++) {
+                PointType point;
+                point.x = document["cornerPointFrames"][i]["x"][j].GetFloat();
+                point.y = document["cornerPointFrames"][i]["y"][j].GetFloat();
+                point.z = document["cornerPointFrames"][i]["z"][j].GetFloat();
+                point.intensity = document["cornerPointFrames"][i]["intensity"][j].GetFloat();
+                pointCloud->push_back(point);
+            }
+            cornerCloudKeyFrames.push_back(pointCloud);
+        }
+
+        for (unsigned i = 0; i < document["surfPointFrames"].Size(); i++) {
+            pcl::PointCloud<PointType>::Ptr pointCloud(new pcl::PointCloud<PointType>());
+            for (unsigned int j = 0; j < document["surfPointFrames"][i]["x"].Size(); j++) {
+                PointType point;
+                point.x = document["surfPointFrames"][i]["x"][j].GetFloat();
+                point.y = document["surfPointFrames"][i]["y"][j].GetFloat();
+                point.z = document["surfPointFrames"][i]["z"][j].GetFloat();
+                point.intensity = document["surfPointFrames"][i]["intensity"][j].GetFloat();
+                pointCloud->push_back(point);
+            }
+            surfCloudKeyFrames.push_back(pointCloud);
+        }
+        ROS_INFO("map analysis end !!");
+    }
+
+    void updatePointAssociateToMap() {
+        transPointAssociateToMap = trans2Affine3f(transformTobeMapped);
+    }
+
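+    // Corner (edge) residuals: for each downsampled corner point, take its 5 nearest neighbours in the corner map,
+    // build their covariance and eigen-decompose it; if the largest eigenvalue clearly dominates, the neighbourhood
+    // is line-like and the point-to-line distance ld2 = |(p0 - p1) x (p0 - p2)| / |p1 - p2| becomes the residual,
+    // with (la, lb, lc) its gradient and s = 1 - 0.9 * |ld2| the weight used to accept the correspondence.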
+    void cornerOptimization() {
+
+        // convert transformTobeMapped into matrix form (a one-line helper)
+        updatePointAssociateToMap();
+
+#pragma omp parallel for num_threads(numberOfCores)
+        for (int i = 0; i < laserCloudCornerLastDSNum; i++) {
+            PointType pointOri, pointSel, coeff;
+            std::vector<int> pointSearchInd;
+            std::vector<float> pointSearchSqDis;
+
+            pointOri = laserCloudCornerLastDS->points[i];
+            // transform the point from the scan frame into the map frame
+            pointAssociateToMap(&pointOri, &pointSel);
+            // 5-nearest-neighbour search in the corner map kd-tree
+            kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
+
+            cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));
+            cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));
+            cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));
+            if (pointSearchSqDis[4] < 1.0) {
+                float cx = 0, cy = 0, cz = 0;
+                // first compute the centroid of the 5 neighbours
+
+                for (int j = 0; j < 5; j++) {
+                    cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;
+                    cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;
+                    cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;
+                }
+                cx /= 5;
+                cy /= 5;
+                cz /= 5;
+                // accumulate matA1 = sum([ax, ay, az]^T * [ax, ay, az]) / 5,
+                // i.e. the covariance matrix of the 5 neighbours
+                float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
+                for (int j = 0; j < 5; j++) {
+                    float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;
+                    float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;
+                    float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;
+
+                    a11 += ax * ax;
+                    a12 += ax * ay;
+                    a13 += ax * az;
+                    a22 += ay * ay;
+                    a23 += ay * az;
+                    a33 += az * az;
+                }
+                a11 /= 5;
+                a12 /= 5;
+                a13 /= 5;
+                a22 /= 5;
+                a23 /= 5;
+                a33 /= 5;
+                // eigen-decomposition of the covariance matrix:
+                // eigenvalues go to matD1, eigenvectors to matV1 (cf. the eigen analysis in the LOAM mapping step)
+                matA1.at<float>(0, 0) = a11;
+                matA1.at<float>(0, 1) = a12;
+                matA1.at<float>(0, 2) = a13;
+                matA1.at<float>(1, 0) = a12;
+                matA1.at<float>(1, 1) = a22;
+                matA1.at<float>(1, 2) = a23;
+                matA1.at<float>(2, 0) = a13;
+                matA1.at<float>(2, 1) = a23;
+                matA1.at<float>(2, 2) = a33;
+
+                cv::eigen(matA1, matD1, matV1);
+                // edge feature: the eigenvector of the dominant eigenvalue gives the direction of the edge line
+                // (one large and two small eigenvalues means the neighbourhood is line-like)
+                // the block below computes the point-to-line distance; the weight s decides whether the point is
+                // close enough to the edge to be kept in laserCloudOri
+                if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {
+
+                    float x0 = pointSel.x;
+                    float y0 = pointSel.y;
+                    float z0 = pointSel.z;
+                    float x1 = cx + 0.1 * matV1.at<float>(0, 0);
+                    float y1 = cy + 0.1 * matV1.at<float>(0, 1);
+                    float z1 = cz + 0.1 * matV1.at<float>(0, 2);
+                    float x2 = cx - 0.1 * matV1.at<float>(0, 0);
+                    float y2 = cy - 0.1 * matV1.at<float>(0, 1);
+                    float z2 = cz - 0.1 * matV1.at<float>(0, 2);
+                    // a012 is the norm of the cross product (p0 - p1) x (p0 - p2), i.e. the area of the
+                    // parallelogram spanned by the two difference vectors (p1 and p2 lie 0.1 along the principal
+                    // eigenvector on either side of the centroid)
+                    // (p0-p1) x (p0-p2) = [ (y0-y1)(z0-z2)-(y0-y2)(z0-z1),
+                    //                      -(x0-x1)(z0-z2)+(x0-x2)(z0-z1),
+                    //                       (x0-x1)(y0-y2)-(x0-x2)(y0-y1) ]
+                    float a012 = sqrt(((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) *
+                                      ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1))
+                                      + ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) *
+                                        ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))
+                                      + ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)) *
+                                        ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)));
+
+                    float l12 = sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2));
+
+                    float la = ((y1 - y2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1))
+                                + (z1 - z2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))) / a012 / l12;
+
+                    float lb = -((x1 - x2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1))
+                                 - (z1 - z2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) / a012 / l12;
+
+                    float lc = -((x1 - x2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))
+                                 + (y1 - y2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) / a012 / l12;
+
+                    float ld2 = a012 / l12;
+
+                    float s = 1 - 0.9 * fabs(ld2);
+
+                    coeff.x = s * la;
+                    coeff.y = s * lb;
+                    coeff.z = s * lc;
+                    coeff.intensity = s * ld2;
+
+                    if (s > 0.1) {
+                        laserCloudOriCornerVec[i] = pointOri;
+                        coeffSelCornerVec[i] = coeff;
+                        laserCloudOriCornerFlag[i] = true;
+                    }
+                }
+            }
+        }
+    }
+
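+    // Surf (planar) residuals: fit a plane pa*x + pb*y + pc*z + 1 = 0 to the 5 nearest surf-map neighbours via a
+    // 5x3 least-squares solve, normalize the normal, and reject the fit if any neighbour lies more than 0.2 m off
+    // the plane; otherwise the signed point-to-plane distance pd2 is the residual, weighted by s.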
+    void surfOptimization() {
+        updatePointAssociateToMap();
+
+#pragma omp parallel for num_threads(numberOfCores)
+        for (int i = 0; i < laserCloudSurfLastDSNum; i++) {
+            PointType pointOri, pointSel, coeff;
+            std::vector<int> pointSearchInd;
+            std::vector<float> pointSearchSqDis;
+
+            pointOri = laserCloudSurfLastDS->points[i];
+            pointAssociateToMap(&pointOri, &pointSel);
+            kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
+
+            Eigen::Matrix<float, 5, 3> matA0;
+            Eigen::Matrix<float, 5, 1> matB0;
+            Eigen::Vector3f matX0;
+
+            matA0.setZero();
+            matB0.fill(-1);
+            matX0.setZero();
+
+            if (pointSearchSqDis[4] < 1.0) {
+                for (int j = 0; j < 5; j++) {
+                    matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
+                    matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
+                    matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
+                }
+
+                matX0 = matA0.colPivHouseholderQr().solve(matB0);
+
+                float pa = matX0(0, 0);
+                float pb = matX0(1, 0);
+                float pc = matX0(2, 0);
+                float pd = 1;
+
+                float ps = sqrt(pa * pa + pb * pb + pc * pc);
+                pa /= ps;
+                pb /= ps;
+                pc /= ps;
+                pd /= ps;
+
+                bool planeValid = true;
+                for (int j = 0; j < 5; j++) {
+                    if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +
+                             pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +
+                             pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {
+                        planeValid = false;
+                        break;
+                    }
+                }
+
+                if (planeValid) {
+                    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;
+
+                    float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x
+                                                              + pointSel.y * pointSel.y + pointSel.z * pointSel.z));
+
+                    coeff.x = s * pa;
+                    coeff.y = s * pb;
+                    coeff.z = s * pc;
+                    coeff.intensity = s * pd2;
+
+                    if (s > 0.1) {
+                        laserCloudOriSurfVec[i] = pointOri;
+                        coeffSelSurfVec[i] = coeff;
+                        laserCloudOriSurfFlag[i] = true;
+                    }
+                }
+            }
+        }
+    }
+
+    void combineOptimizationCoeffs() {
+        // gather the corner and surf coefficients into laserCloudOri / coeffSel for the Gauss-Newton optimization
+        // combine corner coeffs
+        for (int i = 0; i < laserCloudCornerLastDSNum; ++i) {
+            if (laserCloudOriCornerFlag[i] == true) {
+                laserCloudOri->push_back(laserCloudOriCornerVec[i]);
+                coeffSel->push_back(coeffSelCornerVec[i]);
+            }
+        }
+        // combine surf coeffs
+        for (int i = 0; i < laserCloudSurfLastDSNum; ++i) {
+            if (laserCloudOriSurfFlag[i] == true) {
+                laserCloudOri->push_back(laserCloudOriSurfVec[i]);
+                coeffSel->push_back(coeffSelSurfVec[i]);
+            }
+        }
+        // reset flag for next iteration
+        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
+        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
+    }
+
+    bool LMOptimization(int iterCount) {
+        // the optimization follows LeGO-LOAM, and ultimately the original LOAM
+        // LOAM uses a camera-style coordinate convention, so the state is converted into it here as well
+        // (keeping the conventional frame everywhere else makes the code easier to follow and debug)
+        // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation
+        // lidar <- camera      ---     camera <- lidar
+        // x = z                ---     x = y
+        // y = x                ---     y = z
+        // z = y                ---     z = x
+        // roll = yaw           ---     roll = pitch
+        // pitch = roll         ---     pitch = yaw
+        // yaw = pitch          ---     yaw = roll
+
+        // lidar -> camera
+        float srx = sin(transformTobeMapped[1]);
+        float crx = cos(transformTobeMapped[1]);
+        float sry = sin(transformTobeMapped[2]);
+        float cry = cos(transformTobeMapped[2]);
+        float srz = sin(transformTobeMapped[0]);
+        float crz = cos(transformTobeMapped[0]);
+
+        int laserCloudSelNum = laserCloudOri->size();
+        if (laserCloudSelNum < 50) {
+            return false;
+        }
+
+        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));
+        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));
+        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));
+        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
+        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));
+        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));
+        cv::Mat matP(6, 6, CV_32F, cv::Scalar::all(0));
+
+        PointType pointOri, coeff;
+
+        for (int i = 0; i < laserCloudSelNum; i++) {
+            // lidar -> camera
+            pointOri.x = laserCloudOri->points[i].y;
+            pointOri.y = laserCloudOri->points[i].z;
+            pointOri.z = laserCloudOri->points[i].x;
+            // lidar -> camera
+            coeff.x = coeffSel->points[i].y;
+            coeff.y = coeffSel->points[i].z;
+            coeff.z = coeffSel->points[i].x;
+            coeff.intensity = coeffSel->points[i].intensity;
+            // in camera
+            float arx = (crx * sry * srz * pointOri.x + crx * crz * sry * pointOri.y - srx * sry * pointOri.z) * coeff.x
+                        + (-srx * srz * pointOri.x - crz * srx * pointOri.y - crx * pointOri.z) * coeff.y
+                        + (crx * cry * srz * pointOri.x + crx * cry * crz * pointOri.y - cry * srx * pointOri.z) *
+                          coeff.z;
+
+            float ary = ((cry * srx * srz - crz * sry) * pointOri.x
+                         + (sry * srz + cry * crz * srx) * pointOri.y + crx * cry * pointOri.z) * coeff.x
+                        + ((-cry * crz - srx * sry * srz) * pointOri.x
+                           + (cry * srz - crz * srx * sry) * pointOri.y - crx * sry * pointOri.z) * coeff.z;
+
+            float arz =
+                    ((crz * srx * sry - cry * srz) * pointOri.x + (-cry * crz - srx * sry * srz) * pointOri.y) * coeff.x
+                    + (crx * crz * pointOri.x - crx * srz * pointOri.y) * coeff.y
+                    +
+                    ((sry * srz + cry * crz * srx) * pointOri.x + (crz * sry - cry * srx * srz) * pointOri.y) * coeff.z;
+            // lidar -> camera
+            matA.at<float>(i, 0) = arz;
+            matA.at<float>(i, 1) = arx;
+            matA.at<float>(i, 2) = ary;
+            // partial derivatives of the point-to-feature distance w.r.t. translation
+            matA.at<float>(i, 3) = coeff.z;
+            matA.at<float>(i, 4) = coeff.x;
+            matA.at<float>(i, 5) = coeff.y;
+            matB.at<float>(i, 0) = -coeff.intensity;
+        }
+        // transpose matA into matAt
+        // and precompute the normal equations used by cv::solve below
+        cv::transpose(matA, matAt);
+        matAtA = matAt * matA;
+        matAtB = matAt * matB;
+        // Gauss-Newton step: J^T * J * delta(x) = -J^T * f(x)
+        // J is the Jacobian (matA) and f(x) the residual vector (its sign was folded into matB when it was filled)
+        // solve matAtA * matX = matAtB via QR decomposition to obtain the update matX
+        cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);
+
+        if (iterCount == 0) {
+
+            cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
+            cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
+            cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));
+
+            cv::eigen(matAtA, matE, matV);
+            matV.copyTo(matV2);
+
+            isDegenerate = false;
+            float eignThre[6] = {100, 100, 100, 100, 100, 100};
+            for (int i = 5; i >= 0; i--) {
+                if (matE.at<float>(0, i) < eignThre[i]) {
+                    for (int j = 0; j < 6; j++) {
+                        matV2.at<float>(i, j) = 0;
+                    }
+                    isDegenerate = true;
+                } else {
+                    break;
+                }
+            }
+            matP = matV.inv() * matV2;
+        }
+
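+        // if the problem is degenerate, project the increment onto the well-constrained directions only
+        // (rows of matV2 belonging to small eigenvalues of J^T J were zeroed above)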
+        if (isDegenerate) {
+            cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
+            matX.copyTo(matX2);
+            matX = matP * matX2;
+        }
+
+        transformTobeMapped[0] += matX.at<float>(0, 0);
+        transformTobeMapped[1] += matX.at<float>(1, 0);
+        transformTobeMapped[2] += matX.at<float>(2, 0);
+        transformTobeMapped[3] += matX.at<float>(3, 0);
+        transformTobeMapped[4] += matX.at<float>(4, 0);
+        transformTobeMapped[5] += matX.at<float>(5, 0);
+
+        float deltaR = sqrt(
+                pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) +
+                pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +
+                pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
+        float deltaT = sqrt(
+                pow(matX.at<float>(3, 0) * 100, 2) +
+                pow(matX.at<float>(4, 0) * 100, 2) +
+                pow(matX.at<float>(5, 0) * 100, 2));
+
+        // stop iterating once both the rotation and translation increments are small enough
+        if (deltaR < 0.05 && deltaT < 0.05) {
+            return true; // converged
+        }
+        return false; // keep optimizing
+    }
+
+    void scan2MapOptimization() {
+        // register the latest scan against the prebuilt map to refine the robot pose;
+        // the routine splits into corner optimization, surf optimization, and the update step
+        // as in LOAM odometry, point-to-line and point-to-plane distances form the cost minimized by Gauss-Newton
+        static bool firstRun{true}; // skip matching on the very first scan; the pose still has to be initialized
+        if (cloudKeyPoses3D->points.empty() or firstRun) {
+            firstRun = false;
+            return;
+        }
+
+        // minimum feature counts required (edgeFeatureMinValidNum / surfFeatureMinValidNum, e.g. 10 / 100)
+        if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum) {
+            // build kd-trees over the local corner and surf maps
+            kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);
+            kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);
+            // iterate the optimization up to 30 times
+            for (int iterCount = 0; iterCount < 30; iterCount++) {
+                laserCloudOri->clear();
+                coeffSel->clear();
+                // corner (edge) feature matching
+                cornerOptimization();
+
+                // surf (planar) feature matching
+                surfOptimization();
+
+                // gather the residual coefficients
+                combineOptimizationCoeffs();
+
+                if (LMOptimization(iterCount))
+                    break;
+            }
+
+            transformUpdate();
+        } else {
+            ROS_WARN("Not enough features! Only %d edge and %d planar features available.", laserCloudCornerLastDSNum,
+                     laserCloudSurfLastDSNum);
+        }
+    }
+
+    void transformUpdate() {
+        // cloudInfo.imuAvailable and the imu*Init fields are filled in imageProjection
+        if (cloudInfo.imuAvailable == true) {
+            if (std::abs(cloudInfo.imuPitchInit) < 1.4) {
+                // weight of the IMU attitude in the slerp fusion
+                double imuWeight = imuRPYWeight;
+                tf::Quaternion imuQuaternion;
+                tf::Quaternion transformQuaternion;
+                double rollMid, pitchMid, yawMid;
+
+                // slerp roll
+                // fuse the IMU roll/pitch with the optimized transformTobeMapped (roll/pitch are observable from gravity)
+                transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);
+                imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
+                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
+                transformTobeMapped[0] = rollMid;
+
+                // slerp pitch
+                transformQuaternion.setRPY(0, transformTobeMapped[1], 0);
+                imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
+                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
+                transformTobeMapped[1] = pitchMid;
+            }
+        }
+
+        transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);
+        transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);
+        transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);
+
+        incrementalOdometryAffineBack = trans2Affine3f(transformTobeMapped);
+    }
+
+    float constraintTransformation(float value, float limit) {
+        if (value < -limit)
+            value = -limit;
+        if (value > limit)
+            value = limit;
+
+        return value;
+    }
+
+    bool saveFrame() {
+        if (localKeyPose3D->points.empty()) { // no local key pose yet: always keep the first frame
+            return true;
+        }
+
+        Eigen::Affine3f transStart = pclPointToAffine3f(localKeyPose6D->back());
+        Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4],
+                                                            transformTobeMapped[5],
+                                                            transformTobeMapped[0], transformTobeMapped[1],
+                                                            transformTobeMapped[2]);
+        Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
+        float x, y, z, roll, pitch, yaw;
+        pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);
+
+        if (abs(roll) < surroundingkeyframeAddingAngleThreshold &&
+            abs(pitch) < surroundingkeyframeAddingAngleThreshold &&
+            abs(yaw) < surroundingkeyframeAddingAngleThreshold &&
+            sqrt(x * x + y * y + z * z) < surroundingkeyframeAddingDistThreshold)
+            return false;
+
+        return true;
+    }
+
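+    // Odometry factors for the local pose graph: the first localized pose receives a prior that is tight on
+    // roll/pitch but loose on yaw and translation (noise vector ordered [roll, pitch, yaw, x, y, z]); every later
+    // pose is tied to its predecessor with a between factor taken from consecutive scan-to-map results.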
+    void addOdomFactor() {
+        if (localKeyPose3D->points.empty()) {
+            noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances(
+                    (Vector(6) << 1e-2, 1e-2, M_PI * M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter
+            gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));
+            initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));
+        } else {
+            noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances(
+                    (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
+            gtsam::Pose3 poseFrom = pclPointTogtsamPose3(localKeyPose6D->points.back());
+            gtsam::Pose3 poseTo = trans2gtsamPose(transformTobeMapped);
+            gtSAMgraph.add(
+                    BetweenFactor<Pose3>(localKeyPose3D->size() - 1, localKeyPose3D->size(), poseFrom.between(poseTo),
+                                         odometryNoise));
+            initialEstimate.insert(localKeyPose3D->size(), poseTo);
+        }
+    }
+
+    void addGPSFactor() {
+        if (gpsQueue.empty())
+            return;
+
+        // wait for system initialized and settles down
+        if (localKeyPose3D->points.empty())
+            return;
+        else {
+            if (pointDistance(localKeyPose3D->front(), localKeyPose3D->back()) < 5.0)
+                return;
+        }
+
+        // pose covariance small, no need to correct
+        if (poseCovariance(3, 3) < poseCovThreshold && poseCovariance(4, 4) < poseCovThreshold)
+            return;
+
+        // last gps position
+        static PointType lastGPSPoint;
+
+        while (!gpsQueue.empty()) {
+            if (gpsQueue.front().header.stamp.toSec() < timeLaserInfoCur - 0.2) {
+                // message too old
+                gpsQueue.pop_front();
+            } else if (gpsQueue.front().header.stamp.toSec() > timeLaserInfoCur + 0.2) {
+                // message too new
+                break;
+            } else {
+                nav_msgs::Odometry thisGPS = gpsQueue.front();
+                gpsQueue.pop_front();
+
+                // GPS too noisy, skip
+                float noise_x = thisGPS.pose.covariance[0];
+                float noise_y = thisGPS.pose.covariance[7];
+                float noise_z = thisGPS.pose.covariance[14];
+                if (noise_x > gpsCovThreshold || noise_y > gpsCovThreshold)
+                    continue;
+
+                float gps_x = thisGPS.pose.pose.position.x;
+                float gps_y = thisGPS.pose.pose.position.y;
+                float gps_z = thisGPS.pose.pose.position.z;
+                if (!useGpsElevation) {
+                    gps_z = transformTobeMapped[5];
+                    noise_z = 0.01;
+                }
+
+                // GPS not properly initialized (0,0,0)
+                if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)
+                    continue;
+
+                // Add GPS every a few meters
+                PointType curGPSPoint;
+                curGPSPoint.x = gps_x;
+                curGPSPoint.y = gps_y;
+                curGPSPoint.z = gps_z;
+                if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)
+                    continue;
+                else
+                    lastGPSPoint = curGPSPoint;
+
+                gtsam::Vector Vector3(3);
+                Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);
+                noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3);
+                gtsam::GPSFactor gps_factor(localKeyPose3D->size(), gtsam::Point3(gps_x, gps_y, gps_z), gps_noise);
+                gtSAMgraph.add(gps_factor);
+
+                aLoopIsClosed = true;
+                break;
+            }
+        }
+    }
+
+    void saveKeyFramesAndFactor() {
+        if (!saveFrame())
+            return;
+        // odom factor
+        addOdomFactor();
+        // update iSAM
+        isam->update(gtSAMgraph, initialEstimate);
+        isam->update();
+        gtSAMgraph.resize(0);
+        initialEstimate.clear();
+        //save key poses
+        PointType thisPose3D;
+        PointTypePose thisPose6D;
+        Pose3 latestEstimate;
+
+        pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());
+        pcl::copyPointCloud(*laserCloudCornerLastDS, *thisCornerKeyFrame);
+        pcl::copyPointCloud(*laserCloudSurfLastDS, *thisSurfKeyFrame);
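+        // keep the preloaded map (the first mapFrameSize key frames) intact and bound memory: once more than
+        // addKeyFameNum key frames have been added during localization, drop the oldest added one and re-index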
+        if (cornerCloudKeyFrames.size() > mapFrameSize + addKeyFameNum) {
+            cornerCloudKeyFrames.erase(std::begin(cornerCloudKeyFrames) + mapFrameSize);
+            surfCloudKeyFrames.erase(std::begin(surfCloudKeyFrames) + mapFrameSize);
+            cloudKeyPoses3D->erase(cloudKeyPoses3D->begin() + mapFrameSize);
+            cloudKeyPoses6D->erase(cloudKeyPoses6D->begin() + mapFrameSize);
+            for (unsigned int i = mapFrameSize; i < mapFrameSize + addKeyFameNum - 1; i++) {
+                cloudKeyPoses3D->points[i].intensity = i;
+                cloudKeyPoses6D->points[i].intensity = i;
+            }
+        }
+
+        cornerCloudKeyFrames.push_back(thisCornerKeyFrame);
+        surfCloudKeyFrames.push_back(thisSurfKeyFrame);
+        isamCurrentEstimate = isam->calculateEstimate();
+        latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size() - 1);
+        thisPose3D.x = latestEstimate.translation().x();
+        thisPose3D.y = latestEstimate.translation().y();
+        thisPose3D.z = latestEstimate.translation().z();
+        thisPose3D.intensity = cloudKeyPoses3D->size();
+        cloudKeyPoses3D->push_back(thisPose3D);
+        localKeyPose3D->push_back(thisPose3D);
+        thisPose6D.x = thisPose3D.x;
+        thisPose6D.y = thisPose3D.y;
+        thisPose6D.z = thisPose3D.z;
+        thisPose6D.intensity = thisPose3D.intensity; // this can be used as index
+        thisPose6D.roll = latestEstimate.rotation().roll();
+        thisPose6D.pitch = latestEstimate.rotation().pitch();
+        thisPose6D.yaw = latestEstimate.rotation().yaw();
+        thisPose6D.time = timeLaserInfoCur;
+        cloudKeyPoses6D->push_back(thisPose6D);
+        localKeyPose6D->push_back(thisPose6D);
+        poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size() - 1);
+
+        // save updated transform
+        transformTobeMapped[0] = latestEstimate.rotation().roll();
+        transformTobeMapped[1] = latestEstimate.rotation().pitch();
+        transformTobeMapped[2] = latestEstimate.rotation().yaw();
+        transformTobeMapped[3] = latestEstimate.translation().x();
+        transformTobeMapped[4] = latestEstimate.translation().y();
+        transformTobeMapped[5] = latestEstimate.translation().z();
+        // save path for visualization
+        updatePath(thisPose6D);
+
+    }
+
+    void updatePath(const PointTypePose &pose_in) {
+        geometry_msgs::PoseStamped pose_stamped;
+        pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);
+        pose_stamped.header.frame_id = odometryFrame;
+        pose_stamped.pose.position.x = pose_in.x;
+        pose_stamped.pose.position.y = pose_in.y;
+        pose_stamped.pose.position.z = pose_in.z;
+        tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);
+        pose_stamped.pose.orientation.x = q.x();
+        pose_stamped.pose.orientation.y = q.y();
+        pose_stamped.pose.orientation.z = q.z();
+        pose_stamped.pose.orientation.w = q.w();
+
+        globalPath.poses.push_back(pose_stamped);
+    }
+
+    void publishOdometry() {
+        // Publish odometry for ROS (global)
+        nav_msgs::Odometry laserOdometryROS;
+        laserOdometryROS.header.stamp = timeLaserInfoStamp;
+        laserOdometryROS.header.frame_id = odometryFrame;
+        laserOdometryROS.child_frame_id = "odom_mapping";
+        laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];
+        laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];
+        laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];
+        laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0],
+                                                                                         transformTobeMapped[1],
+                                                                                         transformTobeMapped[2]);
+        pubLaserOdometryGlobal.publish(laserOdometryROS);
+
+        // Publish TF
+        static tf::TransformBroadcaster br;
+        tf::Transform t_odom_to_lidar = tf::Transform(
+                tf::createQuaternionFromRPY(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]),
+                tf::Vector3(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5]));
+        tf::StampedTransform trans_odom_to_lidar = tf::StampedTransform(t_odom_to_lidar, timeLaserInfoStamp,
+                                                                        odometryFrame, "lidar_link");
+        br.sendTransform(trans_odom_to_lidar);
+
+        // Publish odometry for ROS (incremental)
+        static bool lastIncreOdomPubFlag = false;
+        static nav_msgs::Odometry laserOdomIncremental; // incremental odometry msg
+        static Eigen::Affine3f increOdomAffine; // incremental odometry in affine
+        if (!lastIncreOdomPubFlag) {
+            lastIncreOdomPubFlag = true;
+            laserOdomIncremental = laserOdometryROS;
+            increOdomAffine = trans2Affine3f(transformTobeMapped);
+        } else {
+            Eigen::Affine3f affineIncre = incrementalOdometryAffineFront.inverse() * incrementalOdometryAffineBack;
+            increOdomAffine = increOdomAffine * affineIncre;
+            float x, y, z, roll, pitch, yaw;
+            pcl::getTranslationAndEulerAngles(increOdomAffine, x, y, z, roll, pitch, yaw);
+            if (cloudInfo.imuAvailable == true) {
+                if (std::abs(cloudInfo.imuPitchInit) < 1.4) {
+                    double imuWeight = 0.1;
+                    tf::Quaternion imuQuaternion;
+                    tf::Quaternion transformQuaternion;
+                    double rollMid, pitchMid, yawMid;
+
+                    // slerp roll
+                    transformQuaternion.setRPY(roll, 0, 0);
+                    imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
+                    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid,
+                                                                                              yawMid);
+                    roll = rollMid;
+
+                    // slerp pitch
+                    transformQuaternion.setRPY(0, pitch, 0);
+                    imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
+                    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid,
+                                                                                              yawMid);
+                    pitch = pitchMid;
+                }
+            }
+            laserOdomIncremental.header.stamp = timeLaserInfoStamp;
+            laserOdomIncremental.header.frame_id = odometryFrame;
+            laserOdomIncremental.child_frame_id = "odom_mapping";
+            laserOdomIncremental.pose.pose.position.x = x;
+            laserOdomIncremental.pose.pose.position.y = y;
+            laserOdomIncremental.pose.pose.position.z = z;
+            laserOdomIncremental.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
+            if (isDegenerate)
+                laserOdomIncremental.pose.covariance[0] = 1;
+            else
+                laserOdomIncremental.pose.covariance[0] = 0;
+        }
+        pubLaserOdometryIncremental.publish(laserOdomIncremental);
+    }
+
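+    // Publish per-frame visualization: key poses, the accumulated localization path, the local
+    // surf map, and (when subscribed) the registered feature/raw clouds and the global path.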
+    void publishFrames() {
+        if (cloudKeyPoses3D->points.empty())
+            return;
+        // publish key poses
+        PointType localPose;
+        localPose.x = transformTobeMapped[3];
+        localPose.y = transformTobeMapped[4];
+        localPose.z = transformTobeMapped[5];
+        localizationPath->push_back(localPose);
+        publishCloud(&pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, odometryFrame);
+        publishCloud(&pubPathPoint, localizationPath, timeLaserInfoStamp, odometryFrame);
+        // Publish surrounding key frames
+        publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, odometryFrame);
+        // publish registered key frame
+        if (pubRecentKeyFrame.getNumSubscribers() != 0) {
+            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
+            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
+            *cloudOut += *transformPointCloud(laserCloudCornerLastDS, &thisPose6D);
+            *cloudOut += *transformPointCloud(laserCloudSurfLastDS, &thisPose6D);
+            publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, odometryFrame);
+        }
+        // publish registered high-res raw cloud
+        if (pubCloudRegisteredRaw.getNumSubscribers() != 0) {
+            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
+            pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
+            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
+            *cloudOut = *transformPointCloud(cloudOut, &thisPose6D);
+            publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, odometryFrame);
+        }
+        // publish path
+        if (pubPath.getNumSubscribers() != 0) {
+            globalPath.header.stamp = timeLaserInfoStamp;
+            globalPath.header.frame_id = odometryFrame;
+            pubPath.publish(globalPath);
+        }
+    }
+};
+
+
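+// Node entry point: construct the optimizer, spin the ROS callbacks, and run the
+// global-map visualization in a separate thread.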
+int main(int argc, char **argv) {
+    ros::init(argc, argv, "lio_sam");
+
+    mapOptimization MO;
+
+    ROS_INFO("\033[1;32m----> Map Localization Started.\033[0m");
+
+    std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);
+    ros::spin();
+    visualizeMapThread.join();
+
+    return 0;
+}

+ 1842 - 0
src/mapOptmization.cpp

@@ -0,0 +1,1842 @@
+#include "utility.h"
+#include "lio_sam/cloud_info.h"
+
+#include <gtsam/geometry/Rot3.h>
+#include <gtsam/geometry/Pose3.h>
+#include <gtsam/slam/PriorFactor.h>
+#include <gtsam/slam/BetweenFactor.h>
+#include <gtsam/navigation/GPSFactor.h>
+#include <gtsam/navigation/ImuFactor.h>
+#include <gtsam/navigation/CombinedImuFactor.h>
+#include <gtsam/nonlinear/NonlinearFactorGraph.h>
+#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
+#include <gtsam/nonlinear/Marginals.h>
+#include <gtsam/nonlinear/Values.h>
+#include <gtsam/inference/Symbol.h>
+
+#include <gtsam/nonlinear/ISAM2.h>
+#include <jsoncpp/json/json.h>
+#include "rapidjson/writer.h"
+#include "rapidjson/ostreamwrapper.h"
+#include <rapidjson/document.h>
+
+using namespace gtsam;
+
+using symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
+using symbol_shorthand::V; // Vel   (xdot,ydot,zdot)
+using symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)
+using symbol_shorthand::G; // GPS pose
+
+/*
+    * A point cloud type that carries a 6D pose ([x, y, z, roll, pitch, yaw]); intensity holds the key frame index and time the scan time stamp
+    */
+struct PointXYZIRPYT {
+    PCL_ADD_POINT4D
+
+    PCL_ADD_INTENSITY;                  // preferred way of adding a XYZ+padding
+    float roll;
+    float pitch;
+    float yaw;
+    double time;
+
+    EIGEN_MAKE_ALIGNED_OPERATOR_NEW   // make sure our new allocators are aligned
+} EIGEN_ALIGN16;                    // enforce SSE padding for correct memory alignment
+
+POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,
+                                   (float, x, x)(float, y, y)
+                                           (float, z, z)(float, intensity, intensity)
+                                           (float, roll, roll)(float, pitch, pitch)(float, yaw, yaw)
+                                           (double, time, time))
+
+typedef PointXYZIRPYT PointTypePose;
+
+
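+// Scan-to-map optimization node: registers incoming corner/surf feature clouds against a
+// key-frame map, maintains a GTSAM/ISAM2 factor graph (including loop-closure constraints),
+// and publishes the resulting odometry, TF and visualization topics.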
+class mapOptimization : public ParamServer {
+
+public:
+
+    // gtsam
+    NonlinearFactorGraph gtSAMgraph;
+    Values initialEstimate;
+    Values optimizedEstimate;
+    ISAM2 *isam;
+    Values isamCurrentEstimate;
+    Eigen::MatrixXd poseCovariance;
+
+    ros::Publisher pubLaserCloudSurround;
+    ros::Publisher pubLaserOdometryGlobal;
+    ros::Publisher pubLaserOdometryIncremental;
+    ros::Publisher pubKeyPoses;
+    ros::Publisher pubPath;
+
+    ros::Publisher pubHistoryKeyFrames;
+    ros::Publisher pubIcpKeyFrames;
+    ros::Publisher pubRecentKeyFrames;
+    ros::Publisher pubRecentKeyFrame;
+    ros::Publisher pubCloudRegisteredRaw;
+    ros::Publisher pubLoopConstraintEdge;
+
+    ros::Subscriber subCloud;
+    ros::Subscriber subGPS;
+    ros::Subscriber subLoop;
+
+    std::deque<nav_msgs::Odometry> gpsQueue;
+    lio_sam::cloud_info cloudInfo;
+
+    vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;
+    vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;
+
+    pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;
+    pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D;
+    pcl::PointCloud<PointType>::Ptr copy_cloudKeyPoses3D;
+    pcl::PointCloud<PointTypePose>::Ptr copy_cloudKeyPoses6D;
+
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odometry optimization
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odometry optimization
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner feature set from odometry optimization
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf feature set from odometry optimization
+
+    pcl::PointCloud<PointType>::Ptr laserCloudOri;
+    pcl::PointCloud<PointType>::Ptr coeffSel;
+
+    std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation
+    std::vector<PointType> coeffSelCornerVec;
+    std::vector<bool> laserCloudOriCornerFlag;
+    std::vector<PointType> laserCloudOriSurfVec; // surf point holder for parallel computation
+    std::vector<PointType> coeffSelSurfVec;
+    std::vector<bool> laserCloudOriSurfFlag;
+
+    map<int, pair<pcl::PointCloud<PointType>, pcl::PointCloud<PointType>>> laserCloudMapContainer;
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap;
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;
+    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;
+    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;
+
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;
+
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;
+    pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;
+
+    pcl::VoxelGrid<PointType> downSizeFilterCorner;
+    pcl::VoxelGrid<PointType> downSizeFilterSurf;
+    pcl::VoxelGrid<PointType> downSizeFilterICP;
+    pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization
+
+    ros::Time timeLaserInfoStamp;
+    double timeLaserInfoCur;
+
+    float transformTobeMapped[6];
+
+    std::mutex mtx;
+    std::mutex mtxLoopInfo;
+
+    bool isDegenerate = false;
+    Eigen::Matrix<float, 6, 6> matP;
+
+    int laserCloudCornerFromMapDSNum = 0;
+    int laserCloudSurfFromMapDSNum = 0;
+    int laserCloudCornerLastDSNum = 0;
+    int laserCloudSurfLastDSNum = 0;
+
+    bool aLoopIsClosed = false;
+    map<int, int> loopIndexContainer; // from new to old
+    vector<pair<int, int>> loopIndexQueue;
+    vector<gtsam::Pose3> loopPoseQueue;
+    vector<gtsam::noiseModel::Diagonal::shared_ptr> loopNoiseQueue;
+    deque<std_msgs::Float64MultiArray> loopInfoVec;
+
+    nav_msgs::Path globalPath;
+
+    Eigen::Affine3f transPointAssociateToMap;
+    Eigen::Affine3f incrementalOdometryAffineFront;
+    Eigen::Affine3f incrementalOdometryAffineBack;
+
+
+    mapOptimization() {
+        ISAM2Params parameters;
+        parameters.relinearizeThreshold = 0.1;
+        parameters.relinearizeSkip = 1;
+        isam = new ISAM2(parameters);
+
+        pubKeyPoses = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/trajectory", 1);
+        pubLaserCloudSurround = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_global", 1);
+        pubLaserOdometryGlobal = nh.advertise<nav_msgs::Odometry>("lio_sam/mapping/odometry", 1);
+        pubLaserOdometryIncremental = nh.advertise<nav_msgs::Odometry>("lio_sam/mapping/odometry_incremental", 1);
+        pubPath = nh.advertise<nav_msgs::Path>("lio_sam/mapping/path", 1);
+
+        subCloud = nh.subscribe<lio_sam::cloud_info>("lio_sam/feature/cloud_info", 1,
+                                                     &mapOptimization::laserCloudInfoHandler, this,
+                                                     ros::TransportHints().tcpNoDelay());
+        subGPS = nh.subscribe<nav_msgs::Odometry>(gpsTopic, 200, &mapOptimization::gpsHandler, this,
+                                                  ros::TransportHints().tcpNoDelay());
+        subLoop = nh.subscribe<std_msgs::Float64MultiArray>("lio_loop/loop_closure_detection", 1,
+                                                            &mapOptimization::loopInfoHandler, this,
+                                                            ros::TransportHints().tcpNoDelay());
+
+        pubHistoryKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/icp_loop_closure_history_cloud",
+                                                                     1);
+        pubIcpKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/icp_loop_closure_corrected_cloud", 1);
+        pubLoopConstraintEdge = nh.advertise<visualization_msgs::MarkerArray>(
+                "/lio_sam/mapping/loop_closure_constraints", 1);
+
+        pubRecentKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_local", 1);
+        pubRecentKeyFrame = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered", 1);
+        pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered_raw", 1);
+
+        downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);
+        downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
+        downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
+        downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity,
+                                                      surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization
+
+        allocateMemory();
+    }
+
+    void allocateMemory() {
+        cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
+        cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
+        copy_cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
+        copy_cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
+
+        kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
+        kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
+
+        laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odometry optimization
+        laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odometry optimization
+        laserCloudCornerLastDS.reset(
+                new pcl::PointCloud<PointType>()); // downsampled corner feature set from odometry optimization
+        laserCloudSurfLastDS.reset(
+                new pcl::PointCloud<PointType>()); // downsampled surf feature set from odometry optimization
+
+        laserCloudOri.reset(new pcl::PointCloud<PointType>());
+        coeffSel.reset(new pcl::PointCloud<PointType>());
+
+        laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);
+        coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);
+        laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);
+        laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);
+        coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);
+        laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);
+
+        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
+        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
+
+        laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());
+        laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());
+        laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());
+        laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());
+
+        kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());
+        kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());
+
+        for (int i = 0; i < 6; ++i) {
+            transformTobeMapped[i] = 0;
+        }
+        matP.setZero();
+    }
+
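+    // Main cloud_info callback: caches the feature clouds and, at most once per
+    // mappingProcessInterval, runs the pipeline: initial guess, surrounding key-frame
+    // extraction, scan down-sampling, scan-to-map optimization, factor-graph update,
+    // pose correction and publishing.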
+    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr &msgIn) {
+        // extract time stamp
+        timeLaserInfoStamp = msgIn->header.stamp;
+        timeLaserInfoCur = msgIn->header.stamp.toSec();
+
+        // extract info and feature cloud
+        cloudInfo = *msgIn;
+        pcl::fromROSMsg(msgIn->cloud_corner, *laserCloudCornerLast);
+        pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);
+
+        std::lock_guard<std::mutex> lock(mtx);
+
+        static double timeLastProcessing = -1;
+        if (timeLaserInfoCur - timeLastProcessing >= mappingProcessInterval) {
+            timeLastProcessing = timeLaserInfoCur;
+//            ROS_INFO("info %f %f %f",
+//                     transformTobeMapped[3],
+//                     transformTobeMapped[4],
+//                     transformTobeMapped[2]
+//                     );
+            updateInitialGuess();
+//            ROS_INFO("info2 %f %f %f",
+//                     transformTobeMapped[3],
+//                     transformTobeMapped[4],
+//                     transformTobeMapped[2]
+//            );
+            extractSurroundingKeyFrames();
+            downsampleCurrentScan();
+            scan2MapOptimization();
+//            ROS_INFO("info3 %f %f %f",
+//                     transformTobeMapped[3],
+//                     transformTobeMapped[4],
+//                     transformTobeMapped[2]
+//            );
+            saveKeyFramesAndFactor();
+            correctPoses();
+            publishOdometry();
+            publishFrames();
+        }
+    }
+
+    void gpsHandler(const nav_msgs::Odometry::ConstPtr &gpsMsg) {
+        gpsQueue.push_back(*gpsMsg);
+    }
+
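+    // Apply the cached scan-to-map transform (transPointAssociateToMap) to a single point.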
+    void pointAssociateToMap(PointType const *const pi, PointType *const po) {
+        po->x = transPointAssociateToMap(0, 0) * pi->x + transPointAssociateToMap(0, 1) * pi->y +
+                transPointAssociateToMap(0, 2) * pi->z + transPointAssociateToMap(0, 3);
+        po->y = transPointAssociateToMap(1, 0) * pi->x + transPointAssociateToMap(1, 1) * pi->y +
+                transPointAssociateToMap(1, 2) * pi->z + transPointAssociateToMap(1, 3);
+        po->z = transPointAssociateToMap(2, 0) * pi->x + transPointAssociateToMap(2, 1) * pi->y +
+                transPointAssociateToMap(2, 2) * pi->z + transPointAssociateToMap(2, 3);
+        po->intensity = pi->intensity;
+    }
+
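+    // Transform a whole cloud by the given 6-DOF pose; the loop is parallelized with OpenMP.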
+    pcl::PointCloud<PointType>::Ptr
+    transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose *transformIn) {
+        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
+
+        int cloudSize = cloudIn->size();
+        cloudOut->resize(cloudSize);
+
+        Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z,
+                                                          transformIn->roll, transformIn->pitch, transformIn->yaw);
+
+#pragma omp parallel for num_threads(numberOfCores)
+        for (int i = 0; i < cloudSize; ++i) {
+            // declare the source-point pointer inside the loop body so it stays private to each OpenMP thread
+            PointType *pointFrom = &cloudIn->points[i];
+            cloudOut->points[i].x =
+                    transCur(0, 0) * pointFrom->x + transCur(0, 1) * pointFrom->y + transCur(0, 2) * pointFrom->z +
+                    transCur(0, 3);
+            cloudOut->points[i].y =
+                    transCur(1, 0) * pointFrom->x + transCur(1, 1) * pointFrom->y + transCur(1, 2) * pointFrom->z +
+                    transCur(1, 3);
+            cloudOut->points[i].z =
+                    transCur(2, 0) * pointFrom->x + transCur(2, 1) * pointFrom->y + transCur(2, 2) * pointFrom->z +
+                    transCur(2, 3);
+            cloudOut->points[i].intensity = pointFrom->intensity;
+        }
+        return cloudOut;
+    }
+
+    gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint) {
+        return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)),
+                            gtsam::Point3(double(thisPoint.x), double(thisPoint.y), double(thisPoint.z)));
+    }
+
+    gtsam::Pose3 trans2gtsamPose(float transformIn[]) {
+        return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]),
+                            gtsam::Point3(transformIn[3], transformIn[4], transformIn[5]));
+    }
+
+    Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint) {
+        return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll, thisPoint.pitch,
+                                      thisPoint.yaw);
+    }
+
+    Eigen::Affine3f trans2Affine3f(float transformIn[]) {
+        return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1],
+                                      transformIn[2]);
+    }
+
+    PointTypePose trans2PointTypePose(float transformIn[]) {
+        PointTypePose thisPose6D;
+        thisPose6D.x = transformIn[3];
+        thisPose6D.y = transformIn[4];
+        thisPose6D.z = transformIn[5];
+        thisPose6D.roll = transformIn[0];
+        thisPose6D.pitch = transformIn[1];
+        thisPose6D.yaw = transformIn[2];
+        return thisPose6D;
+    }
+
+
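+    // Publish the global map at a low rate while the node runs; once ROS shuts down, optionally
+    // dump the accumulated map to PCD files and/or a JSON file under savePCDDirectory.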
+    void visualizeGlobalMapThread() {
+        ros::Rate rate(0.2);
+        while (ros::ok()) {
+            rate.sleep();
+            publishGlobalMap();
+        }
+
+        if (!savePCD && !saveJson)
+            return;
+
+        cout << "****************************************************" << endl;
+        cout << "Saving map to pcd files ..." << endl;
+        savePCDDirectory = std::getenv("HOME") + savePCDDirectory;
+        system((std::string("exec rm -r ") + savePCDDirectory).c_str());
+        system((std::string("mkdir ") + savePCDDirectory).c_str());
+        if (savePCD) {
+            // save key frame trajectory and 6-DOF transformations
+            // (the output directory was already recreated above)
+            pcl::io::savePCDFileASCII(savePCDDirectory + "trajectory.pcd", *cloudKeyPoses3D);
+            pcl::io::savePCDFileASCII(savePCDDirectory + "transformations.pcd", *cloudKeyPoses6D);
+            // extract global point cloud map
+            pcl::PointCloud<PointType>::Ptr globalCornerCloud(new pcl::PointCloud<PointType>());
+            pcl::PointCloud<PointType>::Ptr globalCornerCloudDS(new pcl::PointCloud<PointType>());
+            pcl::PointCloud<PointType>::Ptr globalSurfCloud(new pcl::PointCloud<PointType>());
+            pcl::PointCloud<PointType>::Ptr globalSurfCloudDS(new pcl::PointCloud<PointType>());
+            pcl::PointCloud<PointType>::Ptr globalMapCloud(new pcl::PointCloud<PointType>());
+            for (int i = 0; i < (int) cloudKeyPoses3D->size(); i++) {
+                *globalCornerCloud += *transformPointCloud(cornerCloudKeyFrames[i], &cloudKeyPoses6D->points[i]);
+                *globalSurfCloud += *transformPointCloud(surfCloudKeyFrames[i], &cloudKeyPoses6D->points[i]);
+                cout << "\r" << std::flush << "Processing feature cloud " << i << " of " << cloudKeyPoses6D->size()
+                     << " ...";
+            }
+            // down-sample and save corner cloud
+            downSizeFilterCorner.setInputCloud(globalCornerCloud);
+            downSizeFilterCorner.filter(*globalCornerCloudDS);
+            pcl::io::savePCDFileASCII(savePCDDirectory + "cloudCorner.pcd", *globalCornerCloudDS);
+            // down-sample and save surf cloud
+            downSizeFilterSurf.setInputCloud(globalSurfCloud);
+            downSizeFilterSurf.filter(*globalSurfCloudDS);
+            pcl::io::savePCDFileASCII(savePCDDirectory + "cloudSurf.pcd", *globalSurfCloudDS);
+            // down-sample and save global point cloud map
+            *globalMapCloud += *globalCornerCloud;
+            *globalMapCloud += *globalSurfCloud;
+            pcl::io::savePCDFileASCII(savePCDDirectory + "cloudGlobal.pcd", *globalMapCloud);
+            if (saveJson) {
+                auto jsonFileName = savePCDDirectory + "map.json";
+                functionSave(jsonFileName, globalMapCloud);
+            }
+        }
+
+        cout << "****************************************************" << endl;
+        cout << "Saving map to pcd files completed" << endl;
+    }
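+
+    // Serialize a complete point cloud to JSON (parallel arrays of x/y/z/intensity) using RapidJSON.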
+    int functionSave(std::string fileName, pcl::PointCloud<pcl::PointXYZI>::Ptr map_ptr) {
+        rapidjson::Document document;
+        std::stringstream start, end;
+        boost::posix_time::ptime start_time = boost::posix_time::microsec_clock::local_time();
+        start << start_time;
+        std::cout << start.str() << std::endl;
+        document.SetObject();
+        rapidjson::Value value;
+        value.SetString("map");
+        document.AddMember("name", value, document.GetAllocator());
+        value.SetInt(map_ptr->size());
+        document.AddMember("pointSize", value, document.GetAllocator());
+        rapidjson::Value cloud(rapidjson::kObjectType);
+//    rapidjson::Value keyPose6D(rapidjson::kObjectType);
+        rapidjson::Value itemX(rapidjson::kArrayType), itemY(rapidjson::kArrayType), itemZ(
+                rapidjson::kArrayType), itemI(rapidjson::kArrayType);
+        for (auto point : map_ptr->points) {
+            itemX.PushBack(point.x, document.GetAllocator());
+            itemY.PushBack(point.y, document.GetAllocator());
+            itemZ.PushBack(point.z, document.GetAllocator());
+            itemI.PushBack(point.intensity, document.GetAllocator());
+        }
+        cloud.AddMember("x", itemX, document.GetAllocator());
+        cloud.AddMember("y", itemY, document.GetAllocator());
+        cloud.AddMember("z", itemZ, document.GetAllocator());
+        cloud.AddMember("intensity", itemI, document.GetAllocator());
+
+        std::cout << std::endl;
+        rapidjson::Value x(rapidjson::kArrayType), y(rapidjson::kArrayType), z(rapidjson::kArrayType),
+                r(rapidjson::kArrayType), p(rapidjson::kArrayType), yaw(rapidjson::kArrayType), intensity(
+                rapidjson::kArrayType),
+                tm(rapidjson::kArrayType);
+
+        rapidjson::Value cornerPointFrames(rapidjson::kArrayType);
+        document.AddMember("cloud", cloud, document.GetAllocator());
+
+        std::ofstream ofs(fileName);
+        rapidjson::OStreamWrapper osw(ofs);
+        rapidjson::Writer<rapidjson::OStreamWrapper> writer(osw);
+        document.Accept(writer);
+        boost::posix_time::ptime end_time = boost::posix_time::microsec_clock::local_time();
+        end << end_time;
+        std::cout << end.str() << std::endl;
+        return 0;
+    }
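+
+    // Overload: serialize the 3D/6D key poses and the corner/surf key-frame clouds to JSON;
+    // it does not appear to be called in the code shown here (presumably kept for offline map export).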
+    int functionSave(std::string fileName) {
+        rapidjson::Document document;
+        std::stringstream start, end;
+        boost::posix_time::ptime start_time = boost::posix_time::microsec_clock::local_time();
+        start << start_time;
+        std::cout << start.str() << std::endl;
+        document.SetObject();
+        rapidjson::Value value;
+        value.SetString("map");
+        document.AddMember("name", value, document.GetAllocator());
+        value.SetInt(cloudKeyPoses3D->size());
+        document.AddMember("frameSize", value, document.GetAllocator());
+        rapidjson::Value keyPose3D(rapidjson::kObjectType);
+        rapidjson::Value keyPose6D(rapidjson::kObjectType);
+        rapidjson::Value itemX(rapidjson::kArrayType), itemY(rapidjson::kArrayType), itemZ(
+                rapidjson::kArrayType), itemI(rapidjson::kArrayType);
+        for (auto point : cloudKeyPoses3D->points) {
+            itemX.PushBack(point.x, document.GetAllocator());
+            itemY.PushBack(point.y, document.GetAllocator());
+            itemZ.PushBack(point.z, document.GetAllocator());
+            itemI.PushBack(point.intensity, document.GetAllocator());
+        }
+        keyPose3D.AddMember("x", itemX, document.GetAllocator());
+        keyPose3D.AddMember("y", itemY, document.GetAllocator());
+        keyPose3D.AddMember("z", itemZ, document.GetAllocator());
+        keyPose3D.AddMember("intensity", itemI, document.GetAllocator());
+
+        std::cout << std::endl;
+        rapidjson::Value x(rapidjson::kArrayType), y(rapidjson::kArrayType), z(rapidjson::kArrayType),
+                r(rapidjson::kArrayType), p(rapidjson::kArrayType), yaw(rapidjson::kArrayType), intensity(
+                rapidjson::kArrayType),
+                tm(rapidjson::kArrayType);
+        for (auto pose : cloudKeyPoses6D->points) {
+            x.PushBack(pose.x, document.GetAllocator());
+            y.PushBack(pose.y, document.GetAllocator());
+            z.PushBack(pose.z, document.GetAllocator());
+            r.PushBack(pose.roll, document.GetAllocator());
+            p.PushBack(pose.pitch, document.GetAllocator());
+            yaw.PushBack(pose.yaw, document.GetAllocator());
+            intensity.PushBack(pose.intensity, document.GetAllocator());
+            tm.PushBack(pose.time, document.GetAllocator());
+        }
+        keyPose6D.AddMember("x", x, document.GetAllocator());
+        keyPose6D.AddMember("y", y, document.GetAllocator());
+        keyPose6D.AddMember("z", z, document.GetAllocator());
+        keyPose6D.AddMember("roll", r, document.GetAllocator());
+        keyPose6D.AddMember("pitch", p, document.GetAllocator());
+        keyPose6D.AddMember("yaw", yaw, document.GetAllocator());
+        keyPose6D.AddMember("intensity", intensity, document.GetAllocator());
+        keyPose6D.AddMember("time", tm, document.GetAllocator());
+        rapidjson::Value cornerPointFrames(rapidjson::kArrayType);
+        for (const auto &frame : cornerCloudKeyFrames) {
+            rapidjson::Value px(rapidjson::kArrayType), py(rapidjson::kArrayType), pz(rapidjson::kArrayType), pi(
+                    rapidjson::kArrayType);
+            rapidjson::Value keyFrameJson(rapidjson::kObjectType);
+            for (auto point: frame->points) {
+                px.PushBack(point.x, document.GetAllocator());
+                py.PushBack(point.y, document.GetAllocator());
+                pz.PushBack(point.z, document.GetAllocator());
+                pi.PushBack(point.intensity, document.GetAllocator());
+            }
+            keyFrameJson.AddMember("x", px, document.GetAllocator());
+            keyFrameJson.AddMember("y", py, document.GetAllocator());
+            keyFrameJson.AddMember("z", pz, document.GetAllocator());
+            keyFrameJson.AddMember("intensity", pi, document.GetAllocator());
+            cornerPointFrames.PushBack(keyFrameJson, document.GetAllocator());
+        }
+        rapidjson::Value surfPointFrames(rapidjson::kArrayType);
+
+        for (const auto &frame : surfCloudKeyFrames) {
+            rapidjson::Value px(rapidjson::kArrayType), py(rapidjson::kArrayType), pz(rapidjson::kArrayType), pi(
+                    rapidjson::kArrayType);
+            rapidjson::Value keyFrameJson(rapidjson::kObjectType);
+            for (auto point: frame->points) {
+                px.PushBack(point.x, document.GetAllocator());
+                py.PushBack(point.y, document.GetAllocator());
+                pz.PushBack(point.z, document.GetAllocator());
+                pi.PushBack(point.intensity, document.GetAllocator());
+            }
+            keyFrameJson.AddMember("x", px, document.GetAllocator());
+            keyFrameJson.AddMember("y", py, document.GetAllocator());
+            keyFrameJson.AddMember("z", pz, document.GetAllocator());
+            keyFrameJson.AddMember("intensity", pi, document.GetAllocator());
+            surfPointFrames.PushBack(keyFrameJson, document.GetAllocator());
+        }
+        document.AddMember("keyPose3D", keyPose3D, document.GetAllocator());
+        document.AddMember("keyPose6D", keyPose6D, document.GetAllocator());
+        document.AddMember("cornerPointFrames", cornerPointFrames, document.GetAllocator());
+        document.AddMember("surfPointFrames", surfPointFrames, document.GetAllocator());
+
+        std::ofstream ofs(fileName);
+        rapidjson::OStreamWrapper osw(ofs);
+        rapidjson::Writer<rapidjson::OStreamWrapper> writer(osw);
+        document.Accept(writer);
+        boost::posix_time::ptime end_time = boost::posix_time::microsec_clock::local_time();
+        end << end_time;
+        std::cout << end.str() << std::endl;
+        return 0;
+    }
+
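+    // Publish a down-sampled global map built from the key frames within
+    // globalMapVisualizationSearchRadius of the latest pose (skipped when nobody subscribes).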
+    void publishGlobalMap() {
+        if (pubLaserCloudSurround.getNumSubscribers() == 0)
+            return;
+
+        if (cloudKeyPoses3D->points.empty() == true)
+            return;
+
+        pcl::KdTreeFLANN<PointType>::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyPoses(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyPosesDS(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyFrames(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr globalMapKeyFramesDS(new pcl::PointCloud<PointType>());
+
+        // kd-tree to find near key frames to visualize
+        std::vector<int> pointSearchIndGlobalMap;
+        std::vector<float> pointSearchSqDisGlobalMap;
+        // search near key frames to visualize
+        mtx.lock();
+        kdtreeGlobalMap->setInputCloud(cloudKeyPoses3D);
+        kdtreeGlobalMap->radiusSearch(cloudKeyPoses3D->back(), globalMapVisualizationSearchRadius,
+                                      pointSearchIndGlobalMap, pointSearchSqDisGlobalMap, 0);
+        mtx.unlock();
+
+        for (int i = 0; i < (int) pointSearchIndGlobalMap.size(); ++i)
+            globalMapKeyPoses->push_back(cloudKeyPoses3D->points[pointSearchIndGlobalMap[i]]);
+        // downsample near selected key frames
+        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyPoses; // for global map visualization
+        downSizeFilterGlobalMapKeyPoses.setLeafSize(globalMapVisualizationPoseDensity,
+                                                    globalMapVisualizationPoseDensity,
+                                                    globalMapVisualizationPoseDensity); // for global map visualization
+        downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);
+        downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);
+
+        // extract visualized and downsampled key frames
+        for (int i = 0; i < (int) globalMapKeyPosesDS->size(); ++i) {
+            if (pointDistance(globalMapKeyPosesDS->points[i], cloudKeyPoses3D->back()) >
+                globalMapVisualizationSearchRadius)
+                continue;
+            int thisKeyInd = (int) globalMapKeyPosesDS->points[i].intensity;
+            *globalMapKeyFrames += *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],
+                                                        &cloudKeyPoses6D->points[thisKeyInd]);
+            *globalMapKeyFrames += *transformPointCloud(surfCloudKeyFrames[thisKeyInd],
+                                                        &cloudKeyPoses6D->points[thisKeyInd]);
+        }
+        // downsample visualized points
+        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyFrames; // for global map visualization
+        downSizeFilterGlobalMapKeyFrames.setLeafSize(globalMapVisualizationLeafSize, globalMapVisualizationLeafSize,
+                                                     globalMapVisualizationLeafSize); // for global map visualization
+        downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);
+        downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);
+        publishCloud(&pubLaserCloudSurround, globalMapKeyFramesDS, timeLaserInfoStamp, odometryFrame);
+    }
+
+
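+    // Loop-closure worker thread: runs ICP-based loop detection and constraint visualization
+    // at loopClosureFrequency while the node is alive.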
+    void loopClosureThread() {
+        if (loopClosureEnableFlag == false)
+            return;
+
+        ros::Rate rate(loopClosureFrequency);
+        while (ros::ok()) {
+            rate.sleep();
+            performLoopClosure();
+            visualizeLoopClosure();
+        }
+    }
+
+    void loopInfoHandler(const std_msgs::Float64MultiArray::ConstPtr &loopMsg) {
+        std::lock_guard<std::mutex> lock(mtxLoopInfo);
+        if (loopMsg->data.size() != 2)
+            return;
+
+        loopInfoVec.push_back(*loopMsg);
+
+        while (loopInfoVec.size() > 5)
+            loopInfoVec.pop_front();
+    }
+
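+    // Find a loop candidate (external hint first, then distance-based), register the current
+    // key frame against a local history submap with ICP and, if the fitness score passes
+    // historyKeyframeFitnessScore, queue a between-factor constraint for the pose graph.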
+    void performLoopClosure() {
+        if (cloudKeyPoses3D->points.empty() == true)
+            return;
+
+        mtx.lock();
+        *copy_cloudKeyPoses3D = *cloudKeyPoses3D;
+        *copy_cloudKeyPoses6D = *cloudKeyPoses6D;
+        mtx.unlock();
+
+        // find keys
+        int loopKeyCur;
+        int loopKeyPre;
+        if (detectLoopClosureExternal(&loopKeyCur, &loopKeyPre) == false)
+            if (detectLoopClosureDistance(&loopKeyCur, &loopKeyPre) == false)
+                return;
+
+        // extract cloud
+        pcl::PointCloud<PointType>::Ptr cureKeyframeCloud(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr prevKeyframeCloud(new pcl::PointCloud<PointType>());
+        {
+            loopFindNearKeyframes(cureKeyframeCloud, loopKeyCur, 0);
+            loopFindNearKeyframes(prevKeyframeCloud, loopKeyPre, historyKeyframeSearchNum);
+            if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000)
+                return;
+            if (pubHistoryKeyFrames.getNumSubscribers() != 0)
+                publishCloud(&pubHistoryKeyFrames, prevKeyframeCloud, timeLaserInfoStamp, odometryFrame);
+        }
+
+        // ICP Settings
+        static pcl::IterativeClosestPoint<PointType, PointType> icp;
+        icp.setMaxCorrespondenceDistance(historyKeyframeSearchRadius * 2);
+        icp.setMaximumIterations(100);
+        icp.setTransformationEpsilon(1e-6);
+        icp.setEuclideanFitnessEpsilon(1e-6);
+        icp.setRANSACIterations(0);
+
+        // Align clouds
+        icp.setInputSource(cureKeyframeCloud);
+        icp.setInputTarget(prevKeyframeCloud);
+        pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
+        icp.align(*unused_result);
+
+        if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)
+            return;
+//        ROS_INFO("close loop score %f", icp.getFitnessScore());
+        // publish corrected cloud
+        if (pubIcpKeyFrames.getNumSubscribers() != 0) {
+            pcl::PointCloud<PointType>::Ptr closed_cloud(new pcl::PointCloud<PointType>());
+            pcl::transformPointCloud(*cureKeyframeCloud, *closed_cloud, icp.getFinalTransformation());
+            publishCloud(&pubIcpKeyFrames, closed_cloud, timeLaserInfoStamp, odometryFrame);
+        }
+
+        // Get pose transformation
+        float x, y, z, roll, pitch, yaw;
+        Eigen::Affine3f correctionLidarFrame;
+        correctionLidarFrame = icp.getFinalTransformation();
+        // transform from world origin to wrong pose
+        Eigen::Affine3f tWrong = pclPointToAffine3f(copy_cloudKeyPoses6D->points[loopKeyCur]);
+        // transform from world origin to corrected pose
+        Eigen::Affine3f tCorrect =
+                correctionLidarFrame * tWrong;// pre-multiplying -> successive rotation about a fixed frame
+        pcl::getTranslationAndEulerAngles(tCorrect, x, y, z, roll, pitch, yaw);
+//        ROS_INFO("prev %f %f %f | %f %f %f ",
+//                 copy_cloudKeyPoses6D->points.back().x,
+//                 copy_cloudKeyPoses6D->points.back().y,
+//                 copy_cloudKeyPoses6D->points.back().z,
+//                 copy_cloudKeyPoses6D->points.back().roll,
+//                 copy_cloudKeyPoses6D->points.back().pitch,
+//                 copy_cloudKeyPoses6D->points.back().yaw
+//                 );
+//        ROS_INFO("after %f %f %f | %f %f %f",x,y,z,roll,pitch,yaw);
+        gtsam::Pose3 poseFrom = Pose3(Rot3::RzRyRx(roll, pitch, yaw), Point3(x, y, z));
+        gtsam::Pose3 poseTo = pclPointTogtsamPose3(copy_cloudKeyPoses6D->points[loopKeyPre]);
+        gtsam::Vector Vector6(6);
+        float noiseScore = icp.getFitnessScore();
+        Vector6 << noiseScore, noiseScore, noiseScore, noiseScore, noiseScore, noiseScore;
+        noiseModel::Diagonal::shared_ptr constraintNoise = noiseModel::Diagonal::Variances(Vector6);
+
+        // Add pose constraint
+        mtx.lock();
+        loopIndexQueue.push_back(make_pair(loopKeyCur, loopKeyPre));
+        loopPoseQueue.push_back(poseFrom.between(poseTo));
+        loopNoiseQueue.push_back(constraintNoise);
+        mtx.unlock();
+
+        // add loop constraint
+        loopIndexContainer[loopKeyCur] = loopKeyPre;
+    }
+
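+    // Distance-based loop detection: radius-search the key poses around the latest one and
+    // pick the first candidate whose time stamp differs from the current scan by more than
+    // historyKeyframeSearchTimeDiff.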
+    bool detectLoopClosureDistance(int *latestID, int *closestID) {
+        int loopKeyCur = copy_cloudKeyPoses3D->size() - 1;
+        int loopKeyPre = -1;
+
+        // check loop constraint added before
+        auto it = loopIndexContainer.find(loopKeyCur);
+        if (it != loopIndexContainer.end())
+            return false;
+
+        // find the closest history key frame
+        std::vector<int> pointSearchIndLoop;
+        std::vector<float> pointSearchSqDisLoop;
+        kdtreeHistoryKeyPoses->setInputCloud(copy_cloudKeyPoses3D);
+        kdtreeHistoryKeyPoses->radiusSearch(copy_cloudKeyPoses3D->back(), historyKeyframeSearchRadius,
+                                            pointSearchIndLoop, pointSearchSqDisLoop, 0);
+
+        for (int i = 0; i < (int) pointSearchIndLoop.size(); ++i) {
+            int id = pointSearchIndLoop[i];
+            if (abs(copy_cloudKeyPoses6D->points[id].time - timeLaserInfoCur) > historyKeyframeSearchTimeDiff) {
+                loopKeyPre = id;
+                break;
+            }
+        }
+
+        if (loopKeyPre == -1 || loopKeyCur == loopKeyPre)
+            return false;
+
+        *latestID = loopKeyCur;
+        *closestID = loopKeyPre;
+
+        return true;
+    }
+
+    bool detectLoopClosureExternal(int *latestID, int *closestID) {
+        // this function is not used yet, please ignore it
+        int loopKeyCur = -1;
+        int loopKeyPre = -1;
+
+        std::lock_guard<std::mutex> lock(mtxLoopInfo);
+        if (loopInfoVec.empty())
+            return false;
+
+        double loopTimeCur = loopInfoVec.front().data[0];
+        double loopTimePre = loopInfoVec.front().data[1];
+        loopInfoVec.pop_front();
+
+        if (abs(loopTimeCur - loopTimePre) < historyKeyframeSearchTimeDiff)
+            return false;
+
+        int cloudSize = copy_cloudKeyPoses6D->size();
+        if (cloudSize < 2)
+            return false;
+
+        // latest key
+        loopKeyCur = cloudSize - 1;
+        for (int i = cloudSize - 1; i >= 0; --i) {
+            if (copy_cloudKeyPoses6D->points[i].time >= loopTimeCur)
+                loopKeyCur = round(copy_cloudKeyPoses6D->points[i].intensity);
+            else
+                break;
+        }
+
+        // previous key
+        loopKeyPre = 0;
+        for (int i = 0; i < cloudSize; ++i) {
+            if (copy_cloudKeyPoses6D->points[i].time <= loopTimePre)
+                loopKeyPre = round(copy_cloudKeyPoses6D->points[i].intensity);
+            else
+                break;
+        }
+
+        if (loopKeyCur == loopKeyPre)
+            return false;
+
+        auto it = loopIndexContainer.find(loopKeyCur);
+        if (it != loopIndexContainer.end())
+            return false;
+
+        *latestID = loopKeyCur;
+        *closestID = loopKeyPre;
+
+        return true;
+    }
+
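+    // Build a down-sampled submap from the key frames within +/-searchNum indices of the
+    // given key, transformed into the map frame.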
+    void loopFindNearKeyframes(pcl::PointCloud<PointType>::Ptr &nearKeyframes, const int &key, const int &searchNum) {
+        // extract near keyframes
+        nearKeyframes->clear();
+        int cloudSize = copy_cloudKeyPoses6D->size();
+        for (int i = -searchNum; i <= searchNum; ++i) {
+            int keyNear = key + i;
+            if (keyNear < 0 || keyNear >= cloudSize)
+                continue;
+            *nearKeyframes += *transformPointCloud(cornerCloudKeyFrames[keyNear],
+                                                   &copy_cloudKeyPoses6D->points[keyNear]);
+            *nearKeyframes += *transformPointCloud(surfCloudKeyFrames[keyNear], &copy_cloudKeyPoses6D->points[keyNear]);
+        }
+
+        if (nearKeyframes->empty())
+            return;
+
+        // downsample near keyframes
+        pcl::PointCloud<PointType>::Ptr cloud_temp(new pcl::PointCloud<PointType>());
+        downSizeFilterICP.setInputCloud(nearKeyframes);
+        downSizeFilterICP.filter(*cloud_temp);
+        *nearKeyframes = *cloud_temp;
+    }
+
+    void visualizeLoopClosure() {
+        visualization_msgs::MarkerArray markerArray;
+        // loop nodes
+        visualization_msgs::Marker markerNode;
+        markerNode.header.frame_id = odometryFrame;
+        markerNode.header.stamp = timeLaserInfoStamp;
+        markerNode.action = visualization_msgs::Marker::ADD;
+        markerNode.type = visualization_msgs::Marker::SPHERE_LIST;
+        markerNode.ns = "loop_nodes";
+        markerNode.id = 0;
+        markerNode.pose.orientation.w = 1;
+        markerNode.scale.x = 0.3;
+        markerNode.scale.y = 0.3;
+        markerNode.scale.z = 0.3;
+        markerNode.color.r = 0;
+        markerNode.color.g = 0.8;
+        markerNode.color.b = 1;
+        markerNode.color.a = 1;
+        // loop edges
+        visualization_msgs::Marker markerEdge;
+        markerEdge.header.frame_id = odometryFrame;
+        markerEdge.header.stamp = timeLaserInfoStamp;
+        markerEdge.action = visualization_msgs::Marker::ADD;
+        markerEdge.type = visualization_msgs::Marker::LINE_LIST;
+        markerEdge.ns = "loop_edges";
+        markerEdge.id = 1;
+        markerEdge.pose.orientation.w = 1;
+        markerEdge.scale.x = 0.1;
+        markerEdge.scale.y = 0.1;
+        markerEdge.scale.z = 0.1;
+        markerEdge.color.r = 0.9;
+        markerEdge.color.g = 0.9;
+        markerEdge.color.b = 0;
+        markerEdge.color.a = 1;
+
+        for (auto it = loopIndexContainer.begin(); it != loopIndexContainer.end(); ++it) {
+            int key_cur = it->first;
+            int key_pre = it->second;
+            geometry_msgs::Point p;
+            p.x = copy_cloudKeyPoses6D->points[key_cur].x;
+            p.y = copy_cloudKeyPoses6D->points[key_cur].y;
+            p.z = copy_cloudKeyPoses6D->points[key_cur].z;
+            markerNode.points.push_back(p);
+            markerEdge.points.push_back(p);
+            p.x = copy_cloudKeyPoses6D->points[key_pre].x;
+            p.y = copy_cloudKeyPoses6D->points[key_pre].y;
+            p.z = copy_cloudKeyPoses6D->points[key_pre].z;
+            markerNode.points.push_back(p);
+            markerEdge.points.push_back(p);
+        }
+
+        markerArray.markers.push_back(markerNode);
+        markerArray.markers.push_back(markerEdge);
+        pubLoopConstraintEdge.publish(markerArray);
+    }
+
+
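+    // Seed transformTobeMapped for scan-to-map optimization: the rotation is zeroed on the very
+    // first frame; afterwards the previous pose is propagated by the increment of the odometry
+    // guess carried in cloudInfo (from IMU pre-integration).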
+    void updateInitialGuess() {
+        // save current transformation before any processing
+        incrementalOdometryAffineFront = trans2Affine3f(transformTobeMapped);
+        static Eigen::Affine3f lastImuTransformation;
+        // initialization
+        if (cloudKeyPoses3D->points.empty()) {
+            transformTobeMapped[0] = 0;
+            transformTobeMapped[1] = 0;
+            transformTobeMapped[2] = 0;
+
+            if (!useImuHeadingInitialization)
+                transformTobeMapped[2] = 0;
+
+            lastImuTransformation = pcl::getTransformation(0, 0, 0, 0, 0, 0); // save imu before return;
+            return;
+        }
+        // use imu pre-integration estimation for pose guess
+        static bool lastImuPreTransAvailable = false;
+        static Eigen::Affine3f lastImuPreTransformation;
+        if (cloudInfo.odomAvailable == true) {
+            Eigen::Affine3f transBack = pcl::getTransformation(cloudInfo.initialGuessX, cloudInfo.initialGuessY,
+                                                               cloudInfo.initialGuessZ,
+                                                               cloudInfo.initialGuessRoll, cloudInfo.initialGuessPitch,
+                                                               cloudInfo.initialGuessYaw);
+            if (!lastImuPreTransAvailable) {
+                lastImuPreTransformation = transBack;
+                lastImuPreTransAvailable = true;
+            } else {
+                Eigen::Affine3f transIncre = lastImuPreTransformation.inverse() * transBack;
+                Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
+                Eigen::Affine3f transFinal = transTobe * transIncre;
+                pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4],
+                                                  transformTobeMapped[5],
+                                                  transformTobeMapped[0], transformTobeMapped[1],
+                                                  transformTobeMapped[2]);
+                float test[6];
+                pcl::getTranslationAndEulerAngles(transIncre, test[3], test[4],
+                                                  test[5],
+                                                  test[0], test[1],
+                                                  test[2]);
+//                transformTobeMapped[3] =  cloudInfo.initialGuessX;
+//                transformTobeMapped[4] =  cloudInfo.initialGuessY;
+//                transformTobeMapped[5] =  cloudInfo.initialGuessZ;
+//                transformTobeMapped[0] =  cloudInfo.initialGuessRoll;
+//                transformTobeMapped[1] =  cloudInfo.initialGuessPitch;
+//                transformTobeMapped[2] =  cloudInfo.initialGuessYaw;
+                lastImuPreTransformation = transBack;
+                lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit,
+                                                               cloudInfo.imuYawInit); // save imu before return;
+                return;
+            }
+        }
+        // NOTE: unconditional return; the IMU-only rotation fallback below is never reached.
+        return;
+        // use imu incremental estimation for pose guess (only rotation)
+        if (cloudInfo.imuAvailable == true) {
+            Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit,
+                                                               cloudInfo.imuYawInit);
+            Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;
+
+            Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
+            Eigen::Affine3f transFinal = transTobe * transIncre;
+            pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4],
+                                              transformTobeMapped[5],
+                                              transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
+
+            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit,
+                                                           cloudInfo.imuYawInit); // save imu before return;
+            transformTobeMapped[0] =  cloudInfo.imuRollInit;
+            transformTobeMapped[1] =  cloudInfo.imuPitchInit;
+            transformTobeMapped[2] =  cloudInfo.imuYawInit;
+            return;
+        }
+    }
+
+    void extractForLoopClosure() {
+        pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());
+        int numPoses = cloudKeyPoses3D->size();
+        for (int i = numPoses - 1; i >= 0; --i) {
+            if ((int) cloudToExtract->size() <= surroundingKeyframeSize)
+                cloudToExtract->push_back(cloudKeyPoses3D->points[i]);
+            else
+                break;
+        }
+
+        extractCloud(cloudToExtract);
+    }
+
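+    // Select key poses within surroundingKeyframeSearchRadius of the latest pose (plus the
+    // poses of the last ~10 s, in case the robot rotates in place), down-sample them and
+    // build the local map from them.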
+    void extractNearby() {
+        pcl::PointCloud<PointType>::Ptr surroundingKeyPoses(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr surroundingKeyPosesDS(new pcl::PointCloud<PointType>());
+        std::vector<int> pointSearchInd;
+        std::vector<float> pointSearchSqDis;
+
+        // extract all the nearby key poses and downsample them
+        kdtreeSurroundingKeyPoses->setInputCloud(cloudKeyPoses3D); // create kd-tree
+        kdtreeSurroundingKeyPoses->radiusSearch(cloudKeyPoses3D->back(), (double) surroundingKeyframeSearchRadius,
+                                                pointSearchInd, pointSearchSqDis);
+        for (int i = 0; i < (int) pointSearchInd.size(); ++i) {
+            int id = pointSearchInd[i];
+            surroundingKeyPoses->push_back(cloudKeyPoses3D->points[id]);
+        }
+        downSizeFilterSurroundingKeyPoses.setInputCloud(surroundingKeyPoses);
+        downSizeFilterSurroundingKeyPoses.filter(*surroundingKeyPosesDS);
+        // also extract some latest key frames in case the robot rotates in one position
+        int numPoses = cloudKeyPoses3D->size();
+        for (int i = numPoses - 1; i >= 0; --i) {
+            if (timeLaserInfoCur - cloudKeyPoses6D->points[i].time < 10.0) {
+                surroundingKeyPosesDS->push_back(cloudKeyPoses3D->points[i]);
+            } else {
+                break;
+            }
+        }
+        extractCloud(surroundingKeyPosesDS);
+    }
+
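+    // Fuse the corner/surf clouds of the selected key frames into the local map, reusing a
+    // cache of already-transformed clouds, then voxel-grid down-sample both feature maps.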
+    void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract) {
+        // fuse the map
+        laserCloudCornerFromMap->clear();
+        laserCloudSurfFromMap->clear();
+        for (int i = 0; i < (int) cloudToExtract->size(); ++i) {
+            if (pointDistance(cloudToExtract->points[i], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)
+                continue;
+
+            int thisKeyInd = (int) cloudToExtract->points[i].intensity;
+            if (laserCloudMapContainer.find(thisKeyInd) != laserCloudMapContainer.end()) {
+                // transformed cloud available
+                *laserCloudCornerFromMap += laserCloudMapContainer[thisKeyInd].first;
+                *laserCloudSurfFromMap += laserCloudMapContainer[thisKeyInd].second;
+            } else {
+                // transformed cloud not available
+                pcl::PointCloud<PointType> laserCloudCornerTemp = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],
+                                                                                       &cloudKeyPoses6D->points[thisKeyInd]);
+                pcl::PointCloud<PointType> laserCloudSurfTemp = *transformPointCloud(surfCloudKeyFrames[thisKeyInd],
+                                                                                     &cloudKeyPoses6D->points[thisKeyInd]);
+                *laserCloudCornerFromMap += laserCloudCornerTemp;
+                *laserCloudSurfFromMap += laserCloudSurfTemp;
+                laserCloudMapContainer[thisKeyInd] = make_pair(laserCloudCornerTemp, laserCloudSurfTemp);
+            }
+
+        }
+        // Downsample the surrounding corner key frames (or map)
+        downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);
+        downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);
+        laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();
+        // Downsample the surrounding surf key frames (or map)
+        downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
+        downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
+        laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();
+        // clear map cache if too large
+        if (laserCloudMapContainer.size() > 1000)
+            laserCloudMapContainer.clear();
+    }
+
+    void extractSurroundingKeyFrames() {
+        if (cloudKeyPoses3D->points.empty() == true)
+            return;
+
+        // if (loopClosureEnableFlag == true)
+        // {
+        //     extractForLoopClosure();
+        // } else {
+        //     extractNearby();
+        // }
+
+        extractNearby();
+    }
+
+    void downsampleCurrentScan() {
+        // Downsample cloud from current scan
+        laserCloudCornerLastDS->clear();
+        downSizeFilterCorner.setInputCloud(laserCloudCornerLast);
+        downSizeFilterCorner.filter(*laserCloudCornerLastDS);
+        laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();
+
+        laserCloudSurfLastDS->clear();
+        downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
+        downSizeFilterSurf.filter(*laserCloudSurfLastDS);
+        laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();
+
+    }
+
+    void updatePointAssociateToMap() {
+        transPointAssociateToMap = trans2Affine3f(transformTobeMapped);
+    }
+
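+    // Corner (edge) residuals: for each down-sampled corner point, fit a line to its five
+    // nearest map neighbours via an eigen-decomposition of their covariance and use the
+    // point-to-line distance (and its direction) as the optimization coefficient.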
+    void cornerOptimization() {
+        updatePointAssociateToMap();
+
+#pragma omp parallel for num_threads(numberOfCores)
+        for (int i = 0; i < laserCloudCornerLastDSNum; i++) {
+            PointType pointOri, pointSel, coeff;
+            std::vector<int> pointSearchInd;
+            std::vector<float> pointSearchSqDis;
+
+            pointOri = laserCloudCornerLastDS->points[i];
+            pointAssociateToMap(&pointOri, &pointSel);
+            kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
+
+            cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));
+            cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));
+            cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));
+
+            if (pointSearchSqDis[4] < 1.0) {
+                float cx = 0, cy = 0, cz = 0;
+                for (int j = 0; j < 5; j++) {
+                    cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;
+                    cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;
+                    cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;
+                }
+                cx /= 5;
+                cy /= 5;
+                cz /= 5;
+
+                float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
+                for (int j = 0; j < 5; j++) {
+                    float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;
+                    float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;
+                    float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;
+
+                    a11 += ax * ax;
+                    a12 += ax * ay;
+                    a13 += ax * az;
+                    a22 += ay * ay;
+                    a23 += ay * az;
+                    a33 += az * az;
+                }
+                a11 /= 5;
+                a12 /= 5;
+                a13 /= 5;
+                a22 /= 5;
+                a23 /= 5;
+                a33 /= 5;
+
+                matA1.at<float>(0, 0) = a11;
+                matA1.at<float>(0, 1) = a12;
+                matA1.at<float>(0, 2) = a13;
+                matA1.at<float>(1, 0) = a12;
+                matA1.at<float>(1, 1) = a22;
+                matA1.at<float>(1, 2) = a23;
+                matA1.at<float>(2, 0) = a13;
+                matA1.at<float>(2, 1) = a23;
+                matA1.at<float>(2, 2) = a33;
+
+                cv::eigen(matA1, matD1, matV1);
+
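+                // The neighborhood is edge-like when the largest eigenvalue clearly dominates the second one;
+                // the principal eigenvector gives the line direction, and two points at +/-0.1 m along it
+                // through the centroid (cx, cy, cz) define the line used for the distance computation below.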
+                if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {
+
+                    float x0 = pointSel.x;
+                    float y0 = pointSel.y;
+                    float z0 = pointSel.z;
+                    float x1 = cx + 0.1 * matV1.at<float>(0, 0);
+                    float y1 = cy + 0.1 * matV1.at<float>(0, 1);
+                    float z1 = cz + 0.1 * matV1.at<float>(0, 2);
+                    float x2 = cx - 0.1 * matV1.at<float>(0, 0);
+                    float y2 = cy - 0.1 * matV1.at<float>(0, 1);
+                    float z2 = cz - 0.1 * matV1.at<float>(0, 2);
+
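+                    // a012 = |(p0 - p1) x (p0 - p2)| (parallelogram area) and l12 = |p1 - p2|, so
+                    // ld2 = a012 / l12 is the point-to-line distance; (la, lb, lc) is the direction in which
+                    // that distance grows, and s down-weights large residuals (constraints with s <= 0.1 are dropped).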
+                    float a012 = sqrt(((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) *
+                                      ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1))
+                                      + ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) *
+                                        ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))
+                                      + ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)) *
+                                        ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)));
+
+                    float l12 = sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2));
+
+                    float la = ((y1 - y2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1))
+                                + (z1 - z2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))) / a012 / l12;
+
+                    float lb = -((x1 - x2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1))
+                                 - (z1 - z2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) / a012 / l12;
+
+                    float lc = -((x1 - x2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))
+                                 + (y1 - y2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) / a012 / l12;
+
+                    float ld2 = a012 / l12;
+
+                    float s = 1 - 0.9 * fabs(ld2);
+
+                    coeff.x = s * la;
+                    coeff.y = s * lb;
+                    coeff.z = s * lc;
+                    coeff.intensity = s * ld2;
+
+                    if (s > 0.1) {
+                        laserCloudOriCornerVec[i] = pointOri;
+                        coeffSelCornerVec[i] = coeff;
+                        laserCloudOriCornerFlag[i] = true;
+                    }
+                }
+            }
+        }
+    }
+
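+    // Build point-to-plane constraints: for every downsampled surface point, fit a plane to its five nearest
+    // neighbors in the local surface map and, if the fit is consistent, store the weighted residual.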
+    void surfOptimization() {
+        updatePointAssociateToMap();
+
+#pragma omp parallel for num_threads(numberOfCores)
+        for (int i = 0; i < laserCloudSurfLastDSNum; i++) {
+            PointType pointOri, pointSel, coeff;
+            std::vector<int> pointSearchInd;
+            std::vector<float> pointSearchSqDis;
+
+            pointOri = laserCloudSurfLastDS->points[i];
+            pointAssociateToMap(&pointOri, &pointSel);
+            kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
+
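+            // Fit the plane pa*x + pb*y + pc*z + 1 = 0 by solving the 5x3 least-squares system
+            // matA0 * [pa pb pc]^T = -1, then normalize the coefficients below.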
+            Eigen::Matrix<float, 5, 3> matA0;
+            Eigen::Matrix<float, 5, 1> matB0;
+            Eigen::Vector3f matX0;
+
+            matA0.setZero();
+            matB0.fill(-1);
+            matX0.setZero();
+
+            if (pointSearchSqDis[4] < 1.0) {
+                for (int j = 0; j < 5; j++) {
+                    matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
+                    matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
+                    matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
+                }
+
+                matX0 = matA0.colPivHouseholderQr().solve(matB0);
+
+                float pa = matX0(0, 0);
+                float pb = matX0(1, 0);
+                float pc = matX0(2, 0);
+                float pd = 1;
+
+                float ps = sqrt(pa * pa + pb * pb + pc * pc);
+                pa /= ps;
+                pb /= ps;
+                pc /= ps;
+                pd /= ps;
+
+                bool planeValid = true;
+                for (int j = 0; j < 5; j++) {
+                    if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +
+                             pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +
+                             pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {
+                        planeValid = false;
+                        break;
+                    }
+                }
+
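+                // pd2 is the signed point-to-plane distance; the weight s tolerates larger residuals for
+                // points far from the sensor (the range enters through its fourth root in the denominator).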
+                if (planeValid) {
+                    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;
+
+                    float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x
+                                                              + pointSel.y * pointSel.y + pointSel.z * pointSel.z));
+
+                    coeff.x = s * pa;
+                    coeff.y = s * pb;
+                    coeff.z = s * pc;
+                    coeff.intensity = s * pd2;
+
+                    if (s > 0.1) {
+                        laserCloudOriSurfVec[i] = pointOri;
+                        coeffSelSurfVec[i] = coeff;
+                        laserCloudOriSurfFlag[i] = true;
+                    }
+                }
+            }
+        }
+    }
+
+    void combineOptimizationCoeffs() {
+        // combine corner coeffs
+        for (int i = 0; i < laserCloudCornerLastDSNum; ++i) {
+            if (laserCloudOriCornerFlag[i] == true) {
+                laserCloudOri->push_back(laserCloudOriCornerVec[i]);
+                coeffSel->push_back(coeffSelCornerVec[i]);
+            }
+        }
+        // combine surf coeffs
+        for (int i = 0; i < laserCloudSurfLastDSNum; ++i) {
+            if (laserCloudOriSurfFlag[i] == true) {
+                laserCloudOri->push_back(laserCloudOriSurfVec[i]);
+                coeffSel->push_back(coeffSelSurfVec[i]);
+            }
+        }
+        // reset flag for next iteration
+        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
+        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
+    }
+
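+    // One Gauss-Newton-style step over the 6-DOF pose using the accumulated corner and surface constraints.
+    // Returns true once the pose update is small enough to stop iterating.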
+    bool LMOptimization(int iterCount) {
+        // This optimization is adapted from the original loam_velodyne by Ji Zhang. It works in the camera
+        // convention, so axes and Euler angles are remapped between the lidar and camera frames as follows:
+        // lidar <- camera      ---     camera <- lidar
+        // x = z                ---     x = y
+        // y = x                ---     y = z
+        // z = y                ---     z = x
+        // roll = yaw           ---     roll = pitch
+        // pitch = roll         ---     pitch = yaw
+        // yaw = pitch          ---     yaw = roll
+
+        // lidar -> camera
+        float srx = sin(transformTobeMapped[1]);
+        float crx = cos(transformTobeMapped[1]);
+        float sry = sin(transformTobeMapped[2]);
+        float cry = cos(transformTobeMapped[2]);
+        float srz = sin(transformTobeMapped[0]);
+        float crz = cos(transformTobeMapped[0]);
+
+        int laserCloudSelNum = laserCloudOri->size();
+        if (laserCloudSelNum < 50) {
+            return false;
+        }
+
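+        // Normal equations of the update step: matA holds the Jacobian rows (d residual / d pose),
+        // matB the negated residuals, and matX the resulting 6-DOF correction.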
+        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));
+        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));
+        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));
+        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
+        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));
+        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));
+        cv::Mat matP(6, 6, CV_32F, cv::Scalar::all(0));
+
+        PointType pointOri, coeff;
+
+        for (int i = 0; i < laserCloudSelNum; i++) {
+            // lidar -> camera
+            pointOri.x = laserCloudOri->points[i].y;
+            pointOri.y = laserCloudOri->points[i].z;
+            pointOri.z = laserCloudOri->points[i].x;
+            // lidar -> camera
+            coeff.x = coeffSel->points[i].y;
+            coeff.y = coeffSel->points[i].z;
+            coeff.z = coeffSel->points[i].x;
+            coeff.intensity = coeffSel->points[i].intensity;
+            // in camera
+            float arx = (crx * sry * srz * pointOri.x + crx * crz * sry * pointOri.y - srx * sry * pointOri.z) * coeff.x
+                        + (-srx * srz * pointOri.x - crz * srx * pointOri.y - crx * pointOri.z) * coeff.y
+                        + (crx * cry * srz * pointOri.x + crx * cry * crz * pointOri.y - cry * srx * pointOri.z) *
+                          coeff.z;
+
+            float ary = ((cry * srx * srz - crz * sry) * pointOri.x
+                         + (sry * srz + cry * crz * srx) * pointOri.y + crx * cry * pointOri.z) * coeff.x
+                        + ((-cry * crz - srx * sry * srz) * pointOri.x
+                           + (cry * srz - crz * srx * sry) * pointOri.y - crx * sry * pointOri.z) * coeff.z;
+
+            float arz = ((crz * srx * sry - cry * srz) * pointOri.x + (-cry * crz - srx * sry * srz) * pointOri.y) * coeff.x
+                        + (crx * crz * pointOri.x - crx * srz * pointOri.y) * coeff.y
+                        + ((sry * srz + cry * crz * srx) * pointOri.x + (crz * sry - cry * srx * srz) * pointOri.y) * coeff.z;
+            // lidar -> camera
+            matA.at<float>(i, 0) = arz;
+            matA.at<float>(i, 1) = arx;
+            matA.at<float>(i, 2) = ary;
+            matA.at<float>(i, 3) = coeff.z;
+            matA.at<float>(i, 4) = coeff.x;
+            matA.at<float>(i, 5) = coeff.y;
+            matB.at<float>(i, 0) = -coeff.intensity;
+        }
+
+        cv::transpose(matA, matAt);
+        matAtA = matAt * matA;
+        matAtB = matAt * matB;
+        cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);
+
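+        // On the first iteration, inspect the eigenvalues of A^T A: directions whose eigenvalue falls below
+        // the fixed threshold are treated as degenerate and projected out of later updates via matP.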
+        if (iterCount == 0) {
+
+            cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
+            cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
+            cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));
+
+            cv::eigen(matAtA, matE, matV);
+            matV.copyTo(matV2);
+
+            isDegenerate = false;
+            float eignThre[6] = {100, 100, 100, 100, 100, 100};
+            for (int i = 5; i >= 0; i--) {
+                if (matE.at<float>(0, i) < eignThre[i]) {
+                    for (int j = 0; j < 6; j++) {
+                        matV2.at<float>(i, j) = 0;
+                    }
+                    isDegenerate = true;
+                } else {
+                    break;
+                }
+            }
+            matP = matV.inv() * matV2;
+        }
+
+        if (isDegenerate) {
+            cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
+            matX.copyTo(matX2);
+            matX = matP * matX2;
+        }
+
+        transformTobeMapped[0] += matX.at<float>(0, 0);
+        transformTobeMapped[1] += matX.at<float>(1, 0);
+        transformTobeMapped[2] += matX.at<float>(2, 0);
+        transformTobeMapped[3] += matX.at<float>(3, 0);
+        transformTobeMapped[4] += matX.at<float>(4, 0);
+        transformTobeMapped[5] += matX.at<float>(5, 0);
+
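+        // Converged when the rotation update is below 0.05 deg and the translation update below 0.05 cm.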
+        float deltaR = sqrt(
+                pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) +
+                pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +
+                pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
+        float deltaT = sqrt(
+                pow(matX.at<float>(3, 0) * 100, 2) +
+                pow(matX.at<float>(4, 0) * 100, 2) +
+                pow(matX.at<float>(5, 0) * 100, 2));
+
+        if (deltaR < 0.05 && deltaT < 0.05) {
+            return true; // converged
+        }
+        return false; // keep optimizing
+    }
+
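+    // Align the current scan to the local feature map with up to 30 optimization iterations, provided there
+    // are enough edge and planar features, then fuse the result with the IMU orientation in transformUpdate().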
+    void scan2MapOptimization() {
+        if (cloudKeyPoses3D->points.empty())
+            return;
+
+        if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum) {
+            kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);
+            kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);
+
+            for (int iterCount = 0; iterCount < 30; iterCount++) {
+                laserCloudOri->clear();
+                coeffSel->clear();
+
+                cornerOptimization();
+                surfOptimization();
+
+                combineOptimizationCoeffs();
+
+                if (LMOptimization(iterCount) == true)
+                    break;
+            }
+
+            transformUpdate();
+        } else {
+            ROS_WARN("Not enough features! Only %d edge and %d planar features available.", laserCloudCornerLastDSNum,
+                     laserCloudSurfLastDSNum);
+        }
+    }
+
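+    // Blend the optimized roll and pitch with the IMU estimate (weighted slerp) and clamp roll, pitch and z
+    // to the configured tolerances.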
+    void transformUpdate() {
+        if (cloudInfo.imuAvailable == true) {
+            if (std::abs(cloudInfo.imuPitchInit) < 1.4) {
+                double imuWeight = imuRPYWeight;
+                tf::Quaternion imuQuaternion;
+                tf::Quaternion transformQuaternion;
+                double rollMid, pitchMid, yawMid;
+
+                // slerp roll
+                transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);
+                imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
+                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
+                transformTobeMapped[0] = rollMid;
+
+                // slerp pitch
+                transformQuaternion.setRPY(0, transformTobeMapped[1], 0);
+                imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
+                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
+                transformTobeMapped[1] = pitchMid;
+            }
+        }
+
+        transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);
+        transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);
+        transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);
+
+        incrementalOdometryAffineBack = trans2Affine3f(transformTobeMapped);
+    }
+
+    float constraintTransformation(float value, float limit) {
+        if (value < -limit)
+            value = -limit;
+        if (value > limit)
+            value = limit;
+
+        return value;
+    }
+
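+    // Decide whether the current frame should become a new key frame, i.e. whether the motion since the
+    // last key pose exceeds the angle or distance thresholds.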
+    bool saveFrame() {
+        if (cloudKeyPoses3D->points.empty())
+            return true;
+
+        Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());
+        Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4],
+                                                            transformTobeMapped[5],
+                                                            transformTobeMapped[0], transformTobeMapped[1],
+                                                            transformTobeMapped[2]);
+        Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
+        float x, y, z, roll, pitch, yaw;
+        pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);
+
+        if (abs(roll) < surroundingkeyframeAddingAngleThreshold &&
+            abs(pitch) < surroundingkeyframeAddingAngleThreshold &&
+            abs(yaw) < surroundingkeyframeAddingAngleThreshold &&
+            sqrt(x * x + y * y + z * z) < surroundingkeyframeAddingDistThreshold)
+            return false;
+
+        return true;
+    }
+
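+    // Add a prior factor for the very first key pose, otherwise a between factor from the previous key pose.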
+    void addOdomFactor() {
+        if (cloudKeyPoses3D->points.empty()) {
+            noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances(
+                    (Vector(6) << 1e-2, 1e-2, M_PI * M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter
+            gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));
+            initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));
+        } else {
+            noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances(
+                    (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
+            gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());
+            gtsam::Pose3 poseTo = trans2gtsamPose(transformTobeMapped);
+            gtSAMgraph.add(
+                    BetweenFactor<Pose3>(cloudKeyPoses3D->size() - 1, cloudKeyPoses3D->size(), poseFrom.between(poseTo),
+                                         odometryNoise));
+            initialEstimate.insert(cloudKeyPoses3D->size(), poseTo);
+        }
+    }
+
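+    // Add a GPS factor when the pose covariance has grown, the GPS fix is recent and precise enough, and the
+    // robot has moved at least a few meters since the last accepted fix.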
+    void addGPSFactor() {
+        if (gpsQueue.empty())
+            return;
+
+        // wait for the system to initialize and settle down
+        if (cloudKeyPoses3D->points.empty())
+            return;
+        else {
+            if (pointDistance(cloudKeyPoses3D->front(), cloudKeyPoses3D->back()) < 5.0)
+                return;
+        }
+
+        // pose covariance small, no need to correct
+        if (poseCovariance(3, 3) < poseCovThreshold && poseCovariance(4, 4) < poseCovThreshold)
+            return;
+
+        // last gps position
+        static PointType lastGPSPoint;
+
+        while (!gpsQueue.empty()) {
+            if (gpsQueue.front().header.stamp.toSec() < timeLaserInfoCur - 0.2) {
+                // message too old
+                gpsQueue.pop_front();
+            } else if (gpsQueue.front().header.stamp.toSec() > timeLaserInfoCur + 0.2) {
+                // message too new
+                break;
+            } else {
+                nav_msgs::Odometry thisGPS = gpsQueue.front();
+                gpsQueue.pop_front();
+
+                // GPS too noisy, skip
+                float noise_x = thisGPS.pose.covariance[0];
+                float noise_y = thisGPS.pose.covariance[7];
+                float noise_z = thisGPS.pose.covariance[14];
+                if (noise_x > gpsCovThreshold || noise_y > gpsCovThreshold)
+                    continue;
+
+                float gps_x = thisGPS.pose.pose.position.x;
+                float gps_y = thisGPS.pose.pose.position.y;
+                float gps_z = thisGPS.pose.pose.position.z;
+                if (!useGpsElevation) {
+                    gps_z = transformTobeMapped[5];
+                    noise_z = 0.01;
+                }
+
+                // GPS not properly initialized (0,0,0)
+                if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)
+                    continue;
+
+                // Only add a GPS factor every few meters
+                PointType curGPSPoint;
+                curGPSPoint.x = gps_x;
+                curGPSPoint.y = gps_y;
+                curGPSPoint.z = gps_z;
+                if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)
+                    continue;
+                else
+                    lastGPSPoint = curGPSPoint;
+
+                gtsam::Vector Vector3(3);
+                Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);
+                noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3);
+                gtsam::GPSFactor gps_factor(cloudKeyPoses3D->size(), gtsam::Point3(gps_x, gps_y, gps_z), gps_noise);
+                gtSAMgraph.add(gps_factor);
+
+                aLoopIsClosed = true;
+                break;
+            }
+        }
+    }
+
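+    // Add any loop-closure constraints queued by the loop-closure thread as between factors.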
+    void addLoopFactor() {
+        if (loopIndexQueue.empty())
+            return;
+
+        for (int i = 0; i < (int) loopIndexQueue.size(); ++i) {
+            int indexFrom = loopIndexQueue[i].first;
+            int indexTo = loopIndexQueue[i].second;
+            gtsam::Pose3 poseBetween = loopPoseQueue[i];
+            gtsam::noiseModel::Diagonal::shared_ptr noiseBetween = loopNoiseQueue[i];
+            gtSAMgraph.add(BetweenFactor<Pose3>(indexFrom, indexTo, poseBetween, noiseBetween));
+        }
+
+        loopIndexQueue.clear();
+        loopPoseQueue.clear();
+        loopNoiseQueue.clear();
+        aLoopIsClosed = true;
+    }
+
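+    // If the motion warrants a new key frame: add the odometry, GPS and loop factors, run the iSAM2 update,
+    // and store the optimized key pose, its feature clouds and the visualization path.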
+    void saveKeyFramesAndFactor() {
+        if (saveFrame() == false)
+            return;
+
+        // odom factor
+        addOdomFactor();
+
+        // gps factor
+        addGPSFactor();
+
+        // loop factor
+        addLoopFactor();
+
+        // cout << "****************************************************" << endl;
+        // gtSAMgraph.print("GTSAM Graph:\n");
+
+        // update iSAM
+        isam->update(gtSAMgraph, initialEstimate);
+        isam->update();
+
+        if (aLoopIsClosed == true) {
+            isam->update();
+            isam->update();
+            isam->update();
+            isam->update();
+            isam->update();
+        }
+
+        gtSAMgraph.resize(0);
+        initialEstimate.clear();
+
+        // save key poses
+        PointType thisPose3D;
+        PointTypePose thisPose6D;
+        Pose3 latestEstimate;
+
+        isamCurrentEstimate = isam->calculateEstimate();
+        latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size() - 1);
+        // cout << "****************************************************" << endl;
+        // isamCurrentEstimate.print("Current estimate: ");
+
+        thisPose3D.x = latestEstimate.translation().x();
+        thisPose3D.y = latestEstimate.translation().y();
+        thisPose3D.z = latestEstimate.translation().z();
+        thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index
+        cloudKeyPoses3D->push_back(thisPose3D);
+
+        thisPose6D.x = thisPose3D.x;
+        thisPose6D.y = thisPose3D.y;
+        thisPose6D.z = thisPose3D.z;
+        thisPose6D.intensity = thisPose3D.intensity; // this can be used as index
+        thisPose6D.roll = latestEstimate.rotation().roll();
+        thisPose6D.pitch = latestEstimate.rotation().pitch();
+        thisPose6D.yaw = latestEstimate.rotation().yaw();
+        thisPose6D.time = timeLaserInfoCur;
+        cloudKeyPoses6D->push_back(thisPose6D);
+
+        // cout << "****************************************************" << endl;
+        // cout << "Pose covariance:" << endl;
+        // cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl;
+        poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size() - 1);
+
+        // save updated transform
+        transformTobeMapped[0] = latestEstimate.rotation().roll();
+        transformTobeMapped[1] = latestEstimate.rotation().pitch();
+        transformTobeMapped[2] = latestEstimate.rotation().yaw();
+        transformTobeMapped[3] = latestEstimate.translation().x();
+        transformTobeMapped[4] = latestEstimate.translation().y();
+        transformTobeMapped[5] = latestEstimate.translation().z();
+
+        // save all the received edge and surf points
+        pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());
+        pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());
+        pcl::copyPointCloud(*laserCloudCornerLastDS, *thisCornerKeyFrame);
+        pcl::copyPointCloud(*laserCloudSurfLastDS, *thisSurfKeyFrame);
+
+        // save key frame cloud
+        cornerCloudKeyFrames.push_back(thisCornerKeyFrame);
+        surfCloudKeyFrames.push_back(thisSurfKeyFrame);
+
+        // save path for visualization
+        updatePath(thisPose6D);
+    }
+
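+    // After a loop closure or GPS factor, rewrite all key poses from the latest iSAM2 estimate, clear the
+    // map cache and rebuild the global path.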
+    void correctPoses() {
+        if (cloudKeyPoses3D->points.empty())
+            return;
+
+        if (aLoopIsClosed == true) {
+            // clear map cache
+            laserCloudMapContainer.clear();
+            // clear path
+            globalPath.poses.clear();
+            // update key poses
+            int numPoses = isamCurrentEstimate.size();
+            for (int i = 0; i < numPoses; ++i) {
+                cloudKeyPoses3D->points[i].x = isamCurrentEstimate.at<Pose3>(i).translation().x();
+                cloudKeyPoses3D->points[i].y = isamCurrentEstimate.at<Pose3>(i).translation().y();
+                cloudKeyPoses3D->points[i].z = isamCurrentEstimate.at<Pose3>(i).translation().z();
+
+                cloudKeyPoses6D->points[i].x = cloudKeyPoses3D->points[i].x;
+                cloudKeyPoses6D->points[i].y = cloudKeyPoses3D->points[i].y;
+                cloudKeyPoses6D->points[i].z = cloudKeyPoses3D->points[i].z;
+                cloudKeyPoses6D->points[i].roll = isamCurrentEstimate.at<Pose3>(i).rotation().roll();
+                cloudKeyPoses6D->points[i].pitch = isamCurrentEstimate.at<Pose3>(i).rotation().pitch();
+                cloudKeyPoses6D->points[i].yaw = isamCurrentEstimate.at<Pose3>(i).rotation().yaw();
+
+                updatePath(cloudKeyPoses6D->points[i]);
+            }
+
+            aLoopIsClosed = false;
+        }
+    }
+
+    void updatePath(const PointTypePose &pose_in) {
+        geometry_msgs::PoseStamped pose_stamped;
+        pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);
+        pose_stamped.header.frame_id = odometryFrame;
+        pose_stamped.pose.position.x = pose_in.x;
+        pose_stamped.pose.position.y = pose_in.y;
+        pose_stamped.pose.position.z = pose_in.z;
+        tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);
+        pose_stamped.pose.orientation.x = q.x();
+        pose_stamped.pose.orientation.y = q.y();
+        pose_stamped.pose.orientation.z = q.z();
+        pose_stamped.pose.orientation.w = q.w();
+
+        globalPath.poses.push_back(pose_stamped);
+    }
+
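+    // Publish the globally optimized odometry and TF, plus a smooth incremental odometry obtained by
+    // chaining the per-scan pose increments (with IMU roll/pitch blending) so it is free of loop-closure jumps.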
+    void publishOdometry() {
+        // Publish odometry for ROS (global)
+        nav_msgs::Odometry laserOdometryROS;
+        laserOdometryROS.header.stamp = timeLaserInfoStamp;
+        laserOdometryROS.header.frame_id = odometryFrame;
+        laserOdometryROS.child_frame_id = "odom_mapping";
+        laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];
+        laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];
+        laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];
+        laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0],
+                                                                                         transformTobeMapped[1],
+                                                                                         transformTobeMapped[2]);
+        pubLaserOdometryGlobal.publish(laserOdometryROS);
+
+        // Publish TF
+        static tf::TransformBroadcaster br;
+        tf::Transform t_odom_to_lidar = tf::Transform(
+                tf::createQuaternionFromRPY(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]),
+                tf::Vector3(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5]));
+        tf::StampedTransform trans_odom_to_lidar = tf::StampedTransform(t_odom_to_lidar, timeLaserInfoStamp,
+                                                                        odometryFrame, lidarFrame);
+        br.sendTransform(trans_odom_to_lidar);
+
+        // Publish odometry for ROS (incremental)
+        static bool lastIncreOdomPubFlag = false;
+        static nav_msgs::Odometry laserOdomIncremental; // incremental odometry msg
+        static Eigen::Affine3f increOdomAffine; // incremental odometry in affine
+        if (lastIncreOdomPubFlag == false) {
+            lastIncreOdomPubFlag = true;
+            laserOdomIncremental = laserOdometryROS;
+            increOdomAffine = trans2Affine3f(transformTobeMapped);
+        } else {
+            Eigen::Affine3f affineIncre = incrementalOdometryAffineFront.inverse() * incrementalOdometryAffineBack;
+            increOdomAffine = increOdomAffine * affineIncre;
+            float x, y, z, roll, pitch, yaw;
+            pcl::getTranslationAndEulerAngles(increOdomAffine, x, y, z, roll, pitch, yaw);
+            if (cloudInfo.imuAvailable == true) {
+                if (std::abs(cloudInfo.imuPitchInit) < 1.4) {
+                    double imuWeight = 0.1;
+                    tf::Quaternion imuQuaternion;
+                    tf::Quaternion transformQuaternion;
+                    double rollMid, pitchMid, yawMid;
+
+                    // slerp roll
+                    transformQuaternion.setRPY(roll, 0, 0);
+                    imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
+                    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid,
+                                                                                              yawMid);
+                    roll = rollMid;
+
+                    // slerp pitch
+                    transformQuaternion.setRPY(0, pitch, 0);
+                    imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
+                    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid,
+                                                                                              yawMid);
+                    pitch = pitchMid;
+                }
+            }
+            laserOdomIncremental.header.stamp = timeLaserInfoStamp;
+            laserOdomIncremental.header.frame_id = odometryFrame;
+            laserOdomIncremental.child_frame_id = "odom_mapping";
+            laserOdomIncremental.pose.pose.position.x = x;
+            laserOdomIncremental.pose.pose.position.y = y;
+            laserOdomIncremental.pose.pose.position.z = z;
+            laserOdomIncremental.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
+            if (isDegenerate)
+                laserOdomIncremental.pose.covariance[0] = 1;
+            else
+                laserOdomIncremental.pose.covariance[0] = 0;
+        }
+        pubLaserOdometryIncremental.publish(laserOdomIncremental);
+    }
+
+    void publishFrames() {
+        if (cloudKeyPoses3D->points.empty())
+            return;
+        // publish key poses
+        publishCloud(&pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, odometryFrame);
+        // Publish surrounding key frames
+        publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, odometryFrame);
+        // publish registered key frame
+        if (pubRecentKeyFrame.getNumSubscribers() != 0) {
+            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
+            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
+            *cloudOut += *transformPointCloud(laserCloudCornerLastDS, &thisPose6D);
+            *cloudOut += *transformPointCloud(laserCloudSurfLastDS, &thisPose6D);
+            publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, odometryFrame);
+        }
+        // publish registered high-res raw cloud
+        if (pubCloudRegisteredRaw.getNumSubscribers() != 0) {
+            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
+            pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
+            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
+            *cloudOut = *transformPointCloud(cloudOut, &thisPose6D);
+            publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, odometryFrame);
+        }
+        // publish path
+        if (pubPath.getNumSubscribers() != 0) {
+            globalPath.header.stamp = timeLaserInfoStamp;
+            globalPath.header.frame_id = odometryFrame;
+            pubPath.publish(globalPath);
+        }
+    }
+};
+
+
+int main(int argc, char **argv) {
+    ros::init(argc, argv, "lio_sam");
+
+    mapOptimization MO;
+
+    ROS_INFO("\033[1;32m----> Map Optimization Started.\033[0m");
+
+    std::thread loopthread(&mapOptimization::loopClosureThread, &MO);
+    std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);
+
+    ros::spin();
+
+    loopthread.join();
+    visualizeMapThread.join();
+
+    return 0;
+}