Browse Source

first commit

Lenotary 3 years ago
commit
bd3cf81b65
99 files changed with 3228 additions and 0 deletions
  1. CMakeLists.txt  +241 -0
  2. README.md  +0 -0
  3. config/param.yaml  +29 -0
  4. config/param720.yaml  +29 -0
  5. data/00.jpg  BIN
  6. data/calibrationdata.tar.gz  BIN
  7. data/calibrationdata720.tar.gz  BIN
  8. data/calibrationdata720/left-0000.png  BIN
  9. data/calibrationdata720/left-0001.png  BIN
  10. data/calibrationdata720/left-0002.png  BIN
  11. data/calibrationdata720/left-0003.png  BIN
  12. data/calibrationdata720/left-0004.png  BIN
  13. data/calibrationdata720/left-0005.png  BIN
  14. data/calibrationdata720/left-0006.png  BIN
  15. data/calibrationdata720/left-0007.png  BIN
  16. data/calibrationdata720/left-0008.png  BIN
  17. data/calibrationdata720/left-0009.png  BIN
  18. data/calibrationdata720/left-0010.png  BIN
  19. data/calibrationdata720/left-0011.png  BIN
  20. data/calibrationdata720/left-0012.png  BIN
  21. data/calibrationdata720/left-0013.png  BIN
  22. data/calibrationdata720/left-0014.png  BIN
  23. data/calibrationdata720/left-0015.png  BIN
  24. data/calibrationdata720/left-0016.png  BIN
  25. data/calibrationdata720/left-0017.png  BIN
  26. data/calibrationdata720/left-0018.png  BIN
  27. data/calibrationdata720/left-0019.png  BIN
  28. data/calibrationdata720/left-0020.png  BIN
  29. data/calibrationdata720/left-0021.png  BIN
  30. data/calibrationdata720/left-0022.png  BIN
  31. data/calibrationdata720/left-0023.png  BIN
  32. data/calibrationdata720/left-0024.png  BIN
  33. data/calibrationdata720/left-0025.png  BIN
  34. data/calibrationdata720/left-0026.png  BIN
  35. data/calibrationdata720/left-0027.png  BIN
  36. data/calibrationdata720/left-0028.png  BIN
  37. data/calibrationdata720/left-0029.png  BIN
  38. data/calibrationdata720/left-0030.png  BIN
  39. data/calibrationdata720/left-0031.png  BIN
  40. data/calibrationdata720/left-0032.png  BIN
  41. data/calibrationdata720/left-0033.png  BIN
  42. data/calibrationdata720/left-0034.png  BIN
  43. data/calibrationdata720/left-0035.png  BIN
  44. data/calibrationdata720/left-0036.png  BIN
  45. data/calibrationdata720/left-0037.png  BIN
  46. data/calibrationdata720/left-0038.png  BIN
  47. data/calibrationdata720/left-0039.png  BIN
  48. data/calibrationdata720/left-0040.png  BIN
  49. data/calibrationdata720/left-0041.png  BIN
  50. data/calibrationdata720/left-0042.png  BIN
  51. data/calibrationdata720/left-0043.png  BIN
  52. data/calibrationdata720/left-0044.png  BIN
  53. data/calibrationdata720/left-0045.png  BIN
  54. data/calibrationdata720/left-0046.png  BIN
  55. data/calibrationdata720/left-0047.png  BIN
  56. data/calibrationdata720/left-0048.png  BIN
  57. data/calibrationdata720/left-0049.png  BIN
  58. data/calibrationdata720/left-0050.png  BIN
  59. data/calibrationdata720/left-0051.png  BIN
  60. data/calibrationdata720/left-0052.png  BIN
  61. data/calibrationdata720/left-0053.png  BIN
  62. data/calibrationdata720/ost.txt  +30 -0
  63. data/calibrationdata720/ost.yaml  +26 -0
  64. data/ost.txt  +30 -0
  65. data/ost.yaml  +26 -0
  66. include/v4l2_cv_mat/V4l2Access.h  +50 -0
  67. include/v4l2_cv_mat/V4l2Capture.h  +70 -0
  68. include/v4l2_cv_mat/V4l2Device.h  +182 -0
  69. include/v4l2_cv_mat/V4l2MmapDevice.h  +50 -0
  70. include/v4l2_cv_mat/V4l2Output.h  +37 -0
  71. include/v4l2_cv_mat/V4l2ReadWriteDevice.h  +31 -0
  72. include/v4l2_cv_mat/logger.h  +83 -0
  73. launch/include/rviz.rviz  +134 -0
  74. launch/run.launch  +8 -0
  75. msg/.vscode/c_cpp_properties.json  +18 -0
  76. msg/.vscode/settings.json  +10 -0
  77. msg/PoseArray.msg  +2 -0
  78. ost.txt  +46 -0
  79. ost.yaml  +27 -0
  80. package.xml  +80 -0
  81. script/camera_read.py  +12 -0
  82. script/chessboard.bmp  BIN
  83. script/chessboard.docx  BIN
  84. script/generateChessboard.py  +91 -0
  85. src/.vscode/c_cpp_properties.json  +19 -0
  86. src/.vscode/settings.json  +6 -0
  87. src/camera.cpp  +2 -0
  88. src/camera.h  +29 -0
  89. src/gtsam_test.cpp  +21 -0
  90. src/mark_extract.cpp  +510 -0
  91. src/mark_extract.h  +99 -0
  92. src/node.cpp  +113 -0
  93. src/v4l2_cv_mat/V4l2Access.cpp  +20 -0
  94. src/v4l2_cv_mat/V4l2Capture.cpp  +165 -0
  95. src/v4l2_cv_mat/V4l2Device.cpp  +446 -0
  96. src/v4l2_cv_mat/V4l2MmapDevice.cpp  +307 -0
  97. src/v4l2_cv_mat/V4l2Output.cpp  +106 -0
  98. src/v4l2_cv_mat/V4l2ReadWriteDevice.cpp  +29 -0
  99. src/v4l2_cv_mat/logger.cpp  +14 -0

+ 241 - 0
CMakeLists.txt

@@ -0,0 +1,241 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(top_mark)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+
+
+find_package(catkin REQUIRED COMPONENTS
+        nav_msgs
+        roscpp
+        rospy
+        sensor_msgs
+        std_msgs
+        cv_bridge
+        image_transport
+        message_generation
+        tf
+        )
+find_package(OpenCV REQUIRED)
+find_package(GTSAM REQUIRED QUIET)
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+##   your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+##   * add a build_depend tag for "message_generation"
+##   * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+##   * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+##     but can be declared for certainty nonetheless:
+##     * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+##   * add "message_generation" and every package in MSG_DEP_SET to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * add "message_runtime" and every package in MSG_DEP_SET to
+##     catkin_package(CATKIN_DEPENDS ...)
+##   * uncomment the add_*_files sections below as needed
+##     and list every .msg/.srv/.action file to be processed
+##   * uncomment the generate_messages entry below
+##   * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+# Generate messages in the 'msg' folder
+add_message_files(
+        FILES
+        PoseArray.msg
+)
+
+## Generate services in the 'srv' folder
+# add_service_files(
+#   FILES
+#   Service1.srv
+#   Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+#   FILES
+#   Action1.action
+#   Action2.action
+# )
+
+# Generate added messages and services with any dependencies listed here
+generate_messages(
+        DEPENDENCIES
+        nav_msgs#   sensor_msgs#   std_msgs
+        geometry_msgs
+)
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+##   * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+##   * add "dynamic_reconfigure" to
+##     find_package(catkin REQUIRED COMPONENTS ...)
+##   * uncomment the "generate_dynamic_reconfigure_options" section below
+##     and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+#   cfg/DynReconf1.cfg
+#   cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+        DEPENDS
+        #  INCLUDE_DIRS include
+        #  LIBRARIES top_mark
+        #  CATKIN_DEPENDS nav_msgs roscpp rospy sensor_msgs std_msgs
+        #  DEPENDS system_lib
+        GTSAM
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+        include
+        include/v4l2_cv_mat
+        ${catkin_INCLUDE_DIRS}
+        ${OpenCV_INCLUDE_DIRS}
+        ${GTSAM_INCLUDE_DIR}
+)
+
+link_directories(
+        ${OpenCV_LIBRARY_DIRS}
+        ${GTSAM_LIBRARY_DIRS}
+)
+# define executable to build
+aux_source_directory(src/v4l2_cv_mat SRC_FILES)
+add_library(v4l2cpp SHARED ${SRC_FILES})
+target_link_libraries(v4l2cpp ${OpenCV_LIBS})
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+#   src/${PROJECT_NAME}/top_mark.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+# Declare a C++ executable
+# With catkin_make all packages are built within a single CMake context
+# The recommended prefix ensures that target names across packages don't collide
+add_executable(${PROJECT_NAME}_node src/node.cpp src/camera.cpp src/mark_extract.cpp)
+add_executable(${PROJECT_NAME}_gtsam_node src/gtsam_test.cpp)
+
+# Rename C++ executable without prefix
+# The above recommended prefix causes long target names, the following renames the
+# target back to the shorter version for ease of user use
+# e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME ${PROJECT_NAME}_node PREFIX "")
+set_target_properties(${PROJECT_NAME}_gtsam_node PROPERTIES OUTPUT_NAME ${PROJECT_NAME}_gtsam_node PREFIX "")
+
+# Add cmake target dependencies of the executable
+# same as for the library above
+add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+add_dependencies(${PROJECT_NAME}_gtsam_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+# Specify libraries to link a library or executable target against
+target_link_libraries(${PROJECT_NAME}_node
+        ${catkin_LIBRARIES}
+        v4l2cpp
+        ${OpenCV_LIBS}
+        )
+# Specify libraries to link a library or executable target against
+target_link_libraries(${PROJECT_NAME}_gtsam_node
+        ${catkin_LIBRARIES}
+        v4l2cpp
+        ${OpenCV_LIBS}
+        gtsam
+        )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+#   scripts/my_python_script
+#   DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+#   RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+#   ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+#   RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+#   DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+#   FILES_MATCHING PATTERN "*.h"
+#   PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+#   # myfile1
+#   # myfile2
+#   DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_top_mark.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+#   target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
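
A quick build note, inferred from the file above rather than stated anywhere in the commit: everything under src/v4l2_cv_mat is compiled into a shared library v4l2cpp, and two executables are produced, top_mark_node (node.cpp, camera.cpp, mark_extract.cpp) and top_mark_gtsam_node (gtsam_test.cpp). With OpenCV and GTSAM installed system-wide, a plain `catkin_make` in the workspace root should be enough to build the package.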

+ 0 - 0
README.md


+ 29 - 0
config/param.yaml

@@ -0,0 +1,29 @@
+threshold: 50
+point_diameter: 0.025 # diameter of a dot
+point_distance: 0.06  # usually twice the dot diameter
+image_width: 1920
+image_height: 1080
+camera_name: narrow_stereo
+camera_matrix:
+  rows: 3
+  cols: 3
+  data: [926.02513,   0.     , 938.86119,
+         0.     , 930.43114, 531.07557,
+         0.     ,   0.     ,   1.     ]
+distortion_model: plumb_bob
+distortion_coefficients:
+  rows: 1
+  cols: 5
+  data: [0.002693, -0.022956, 0.000658, 0.001861, 0.000000]
+rectification_matrix:
+  rows: 3
+  cols: 3
+  data: [1., 0., 0.,
+         0., 1., 0.,
+         0., 0., 1.]
+projection_matrix:
+  rows: 3
+  cols: 4
+  data: [833.93768,   0.     , 941.96793,   0.     ,
+         0.     , 867.64252, 531.1505 ,   0.     ,
+         0.     ,   0.     ,   1.     ,   0.     ]
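
The calibration block above follows the camera_calibration ost.yaml layout. Below is a minimal sketch (not part of this commit) of turning camera_matrix/data and distortion_coefficients/data into OpenCV matrices once the YAML has been loaded into a node's private namespace, which is essentially what mark_extract.cpp does later in this commit; the node and variable names here are illustrative.

    #include <ros/ros.h>
    #include <opencv2/opencv.hpp>
    #include <vector>

    int main(int argc, char** argv) {
        ros::init(argc, argv, "undistort_demo");          // illustrative node name
        ros::NodeHandle nh("~");                          // param.yaml loaded into the private namespace
        std::vector<double> K, D;
        nh.param<std::vector<double>>("camera_matrix/data", K, std::vector<double>());
        nh.param<std::vector<double>>("distortion_coefficients/data", D, std::vector<double>());
        cv::Mat cameraMatrix = cv::Mat(K, true).reshape(1, 3);   // 3x3 intrinsics
        cv::Mat distCoeffs   = cv::Mat(D, true).reshape(1, 1);   // 1x5 plumb_bob coefficients
        // cv::undistort(raw, rectified, cameraMatrix, distCoeffs, cameraMatrix);
        return 0;
    }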

+ 29 - 0
config/param720.yaml

@@ -0,0 +1,29 @@
+threshold: 50
+point_diameter: 0.015 # diameter of a dot
+point_distance: 0.03  # usually twice the dot diameter
+image_width: 1280
+image_height: 720
+camera_name: narrow_stereo
+camera_matrix:
+  rows: 3
+  cols: 3
+  data: [622.16373,   0.     , 625.97052,
+         0.     , 619.8234 , 355.25153,
+         0.     ,   0.     ,   1.     ]
+distortion_model: plumb_bob
+distortion_coefficients:
+  rows: 1
+  cols: 5
+  data: [-0.005746, -0.009235, 0.003051, -0.003447, 0.000000]
+rectification_matrix:
+  rows: 3
+  cols: 3
+  data: [1., 0., 0.,
+         0., 1., 0.,
+         0., 0., 1.]
+projection_matrix:
+  rows: 3
+  cols: 4
+  data: [568.35791,   0.     , 616.01846,   0.     ,
+         0.     , 577.48132, 356.89346,   0.     ,
+         0.     ,   0.     ,   1.     ,   0.     ]

BIN
data/00.jpg


BIN
data/calibrationdata.tar.gz


BIN
data/calibrationdata720.tar.gz


BIN
data/calibrationdata720/left-0000.png


BIN
data/calibrationdata720/left-0001.png


BIN
data/calibrationdata720/left-0002.png


BIN
data/calibrationdata720/left-0003.png


BIN
data/calibrationdata720/left-0004.png


BIN
data/calibrationdata720/left-0005.png


BIN
data/calibrationdata720/left-0006.png


BIN
data/calibrationdata720/left-0007.png


BIN
data/calibrationdata720/left-0008.png


BIN
data/calibrationdata720/left-0009.png


BIN
data/calibrationdata720/left-0010.png


BIN
data/calibrationdata720/left-0011.png


BIN
data/calibrationdata720/left-0012.png


BIN
data/calibrationdata720/left-0013.png


BIN
data/calibrationdata720/left-0014.png


BIN
data/calibrationdata720/left-0015.png


BIN
data/calibrationdata720/left-0016.png


BIN
data/calibrationdata720/left-0017.png


BIN
data/calibrationdata720/left-0018.png


BIN
data/calibrationdata720/left-0019.png


BIN
data/calibrationdata720/left-0020.png


BIN
data/calibrationdata720/left-0021.png


BIN
data/calibrationdata720/left-0022.png


BIN
data/calibrationdata720/left-0023.png


BIN
data/calibrationdata720/left-0024.png


BIN
data/calibrationdata720/left-0025.png


BIN
data/calibrationdata720/left-0026.png


BIN
data/calibrationdata720/left-0027.png


BIN
data/calibrationdata720/left-0028.png


BIN
data/calibrationdata720/left-0029.png


BIN
data/calibrationdata720/left-0030.png


BIN
data/calibrationdata720/left-0031.png


BIN
data/calibrationdata720/left-0032.png


BIN
data/calibrationdata720/left-0033.png


BIN
data/calibrationdata720/left-0034.png


BIN
data/calibrationdata720/left-0035.png


BIN
data/calibrationdata720/left-0036.png


BIN
data/calibrationdata720/left-0037.png


BIN
data/calibrationdata720/left-0038.png


BIN
data/calibrationdata720/left-0039.png


BIN
data/calibrationdata720/left-0040.png


BIN
data/calibrationdata720/left-0041.png


BIN
data/calibrationdata720/left-0042.png


BIN
data/calibrationdata720/left-0043.png


BIN
data/calibrationdata720/left-0044.png


BIN
data/calibrationdata720/left-0045.png


BIN
data/calibrationdata720/left-0046.png


BIN
data/calibrationdata720/left-0047.png


BIN
data/calibrationdata720/left-0048.png


BIN
data/calibrationdata720/left-0049.png


BIN
data/calibrationdata720/left-0050.png


BIN
data/calibrationdata720/left-0051.png


BIN
data/calibrationdata720/left-0052.png


BIN
data/calibrationdata720/left-0053.png


+ 30 - 0
data/calibrationdata720/ost.txt

@@ -0,0 +1,30 @@
+# oST version 5.0 parameters
+
+
+[image]
+
+width
+1280
+
+height
+720
+
+[narrow_stereo]
+
+camera matrix
+582.163728 0.000000 625.970520
+0.000000 579.823401 355.251530
+0.000000 0.000000 1.000000
+
+distortion
+-0.005746 -0.009235 0.003051 -0.003447 0.000000
+
+rectification
+1.000000 0.000000 0.000000
+0.000000 1.000000 0.000000
+0.000000 0.000000 1.000000
+
+projection
+568.357910 0.000000 616.018462 0.000000
+0.000000 577.481323 356.893457 0.000000
+0.000000 0.000000 1.000000 0.000000

+ 26 - 0
data/calibrationdata720/ost.yaml

@@ -0,0 +1,26 @@
+image_width: 1280
+image_height: 720
+camera_name: narrow_stereo
+camera_matrix:
+  rows: 3
+  cols: 3
+  data: [582.16373,   0.     , 625.97052,
+           0.     , 579.8234 , 355.25153,
+           0.     ,   0.     ,   1.     ]
+distortion_model: plumb_bob
+distortion_coefficients:
+  rows: 1
+  cols: 5
+  data: [-0.005746, -0.009235, 0.003051, -0.003447, 0.000000]
+rectification_matrix:
+  rows: 3
+  cols: 3
+  data: [1., 0., 0.,
+         0., 1., 0.,
+         0., 0., 1.]
+projection_matrix:
+  rows: 3
+  cols: 4
+  data: [568.35791,   0.     , 616.01846,   0.     ,
+           0.     , 577.48132, 356.89346,   0.     ,
+           0.     ,   0.     ,   1.     ,   0.     ]

+ 30 - 0
data/ost.txt

@@ -0,0 +1,30 @@
+# oST version 5.0 parameters
+
+
+[image]
+
+width
+1920
+
+height
+1080
+
+[narrow_stereo]
+
+camera matrix
+866.025129 0.000000 938.861191
+0.000000 870.431136 531.075569
+0.000000 0.000000 1.000000
+
+distortion
+0.002693 -0.022956 0.000658 0.001861 0.000000
+
+rectification
+1.000000 0.000000 0.000000
+0.000000 1.000000 0.000000
+0.000000 0.000000 1.000000
+
+projection
+833.937683 0.000000 941.967931 0.000000
+0.000000 867.642517 531.150495 0.000000
+0.000000 0.000000 1.000000 0.000000

+ 26 - 0
data/ost.yaml

@@ -0,0 +1,26 @@
+image_width: 1920
+image_height: 1080
+camera_name: narrow_stereo
+camera_matrix:
+  rows: 3
+  cols: 3
+  data: [866.02513,   0.     , 938.86119,
+           0.     , 870.43114, 531.07557,
+           0.     ,   0.     ,   1.     ]
+distortion_model: plumb_bob
+distortion_coefficients:
+  rows: 1
+  cols: 5
+  data: [0.002693, -0.022956, 0.000658, 0.001861, 0.000000]
+rectification_matrix:
+  rows: 3
+  cols: 3
+  data: [1., 0., 0.,
+         0., 1., 0.,
+         0., 0., 1.]
+projection_matrix:
+  rows: 3
+  cols: 4
+  data: [833.93768,   0.     , 941.96793,   0.     ,
+           0.     , 867.64252, 531.1505 ,   0.     ,
+           0.     ,   0.     ,   1.     ,   0.     ]

+ 50 - 0
include/v4l2_cv_mat/V4l2Access.h

@@ -0,0 +1,50 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Access.h
+** 
+** V4L2 wrapper 
+**
+** -------------------------------------------------------------------------*/
+
+
+#ifndef V4L2_ACCESS
+#define V4L2_ACCESS
+
+#include "V4l2Device.h"
+
+class V4l2Access
+{
+	public:
+		enum IoType
+		{
+			IOTYPE_READWRITE,
+			IOTYPE_MMAP
+		};
+		
+		V4l2Access(V4l2Device* device);
+		virtual ~V4l2Access();
+		
+		int getFd()         { return m_device->getFd();         }
+		unsigned int getBufferSize() { return m_device->getBufferSize(); }
+		unsigned int getFormat()     { return m_device->getFormat();     }
+		unsigned int getWidth()      { return m_device->getWidth();      }
+		unsigned int getHeight()     { return m_device->getHeight();     }
+		void queryFormat()  { m_device->queryFormat();          }
+
+		int isReady()       { return m_device->isReady();       }
+		int start()         { return m_device->start();         }
+		int stop()          { return m_device->stop();          }
+
+	private:
+		V4l2Access(const V4l2Access&);
+		V4l2Access & operator=(const V4l2Access&);
+	
+	protected:
+		V4l2Device* m_device;		
+};
+
+
+#endif

+ 70 - 0
include/v4l2_cv_mat/V4l2Capture.h

@@ -0,0 +1,70 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Capture.h
+** 
+** V4L2 Capture wrapper 
+**
+** -------------------------------------------------------------------------*/
+
+
+#ifndef V4L2_CAPTURE
+#define V4L2_CAPTURE
+
+#include "V4l2Access.h"
+#include "opencv2/core/core.hpp"
+
+
+// ---------------------------------
+// V4L2 Capture
+// ---------------------------------
+class V4l2Capture : public V4l2Access
+{		
+	protected:	
+		V4l2Capture(V4l2Device* device);
+	
+	public:
+        /**
+         * @brief create  Create a capture object.
+         * @param param
+         *    Usage note: when reading a driver-free UVC camera, avoid using OpenCV's
+         *    VideoCapture directly; its simple API hides which frame format the camera
+         *    actually delivers. Use the Qt v4l2 test benchmark tool to find the frame
+         *    format you really need, e.g. V4L2_PIX_FMT_MJPEG (MJPEG).
+         *    Example: V4L2DeviceParameters param("/dev/video0", V4L2_PIX_FMT_MJPEG, 1920, 1080, 30, 0, verbose);
+         * @param iotype
+         * @return
+         */
+		static V4l2Capture* create(const V4L2DeviceParameters & param, IoType iotype = V4l2Access::IOTYPE_MMAP);
+		virtual ~V4l2Capture();
+
+        size_t read(char* buffer, size_t bufferSize);
+        /**
+         * @brief read  Read one image.
+         * @param readImage The captured image.
+         * @return
+         */
+        int read(cv::Mat &readImage);
+        /**
+         * @brief isReadable  Check whether an image can be read.
+         * @param tv How long to wait, e.g.
+         *      timeval tv;
+                tv.tv_sec=1;
+                tv.tv_usec=0;
+                int ret = this->isReadable(&tv);
+         *
+         * @return -1 not readable      0 timeout        1 success
+         */
+        int isReadable(timeval* tv);
+        /**
+         * @brief getBusInfo  Get the bus address; with several camera devices attached, this tells which camera is currently being captured.
+         * @return
+         */
+        const char * getBusInfo();
+
+};
+
+
+#endif
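
A minimal capture-loop sketch (not part of this commit), built only from the API declared above and the example parameters in the comment. The device path, resolution and exposure mode 0 are assumptions, and since the return convention of read(cv::Mat&) is defined in V4l2Capture.cpp rather than here, the sketch just checks that the frame is non-empty.

    // Link against the v4l2cpp library built by this package.
    #include "V4l2Capture.h"
    #include <opencv2/highgui/highgui.hpp>
    #include <sys/time.h>

    int main() {
        // Assumed device and settings; pick the real frame format with the Qt v4l2 test benchmark.
        V4L2DeviceParameters param("/dev/video0", V4L2_PIX_FMT_MJPEG, 1280, 720, 30, /*exp_mode*/ 0);
        V4l2Capture* capture = V4l2Capture::create(param, V4l2Access::IOTYPE_MMAP);
        if (!capture) return 1;

        cv::Mat frame;
        timeval tv;
        tv.tv_sec = 1; tv.tv_usec = 0;                 // wait up to 1 s per frame
        while (capture->isReadable(&tv) == 1) {        // -1 not readable, 0 timeout, 1 success
            capture->read(frame);
            if (!frame.empty()) {
                cv::imshow("frame", frame);
                if (cv::waitKey(1) == 'q') break;
            }
            tv.tv_sec = 1; tv.tv_usec = 0;             // select() may modify tv, so reset it
        }
        delete capture;
        return 0;
    }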

+ 182 - 0
include/v4l2_cv_mat/V4l2Device.h

@@ -0,0 +1,182 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Device.h
+** 
+** V4L2 wrapper 
+**
+** -------------------------------------------------------------------------*/
+
+
+#ifndef V4L2_DEVICE
+#define V4L2_DEVICE
+
+#include <string>
+#include <list>
+#include <linux/videodev2.h>
+#include <fcntl.h>
+
+#ifndef V4L2_PIX_FMT_VP8
+#define V4L2_PIX_FMT_VP8  v4l2_fourcc('V', 'P', '8', '0')
+#endif
+#ifndef V4L2_PIX_FMT_VP9
+#define V4L2_PIX_FMT_VP9  v4l2_fourcc('V', 'P', '9', '0')
+#endif
+#ifndef V4L2_PIX_FMT_HEVC
+#define V4L2_PIX_FMT_HEVC  v4l2_fourcc('H', 'E', 'V', 'C')
+#endif
+
+// ---------------------------------
+// V4L2 Device parameters
+// ---------------------------------
+/*
+ * YUV format reference table; match against the "capture image format" column of the Qt v4l2 test benchmark tool (on Debian-like systems, install with apt-get install qv4l2).
+2.6.1. Packed YUV formats
+2.6.2. V4L2_PIX_FMT_GREY (‘GREY’)
+2.6.3. V4L2_PIX_FMT_Y10 (‘Y10 ‘)
+2.6.4. V4L2_PIX_FMT_Y12 (‘Y12 ‘)
+2.6.5. V4L2_PIX_FMT_Y10BPACK (‘Y10B’)
+2.6.6. V4L2_PIX_FMT_Y16 (‘Y16 ‘)
+2.6.7. V4L2_PIX_FMT_Y16_BE (‘Y16 ‘ | (1 << 31))
+2.6.8. V4L2_PIX_FMT_Y8I (‘Y8I ‘)
+2.6.9. V4L2_PIX_FMT_Y12I (‘Y12I’)
+2.6.10. V4L2_PIX_FMT_UV8 (‘UV8’)
+2.6.11. V4L2_PIX_FMT_YUYV (‘YUYV’)
+2.6.12. V4L2_PIX_FMT_UYVY (‘UYVY’)
+2.6.13. V4L2_PIX_FMT_YVYU (‘YVYU’)
+2.6.14. V4L2_PIX_FMT_VYUY (‘VYUY’)
+2.6.15. V4L2_PIX_FMT_Y41P (‘Y41P’)
+2.6.16. V4L2_PIX_FMT_YVU420 (‘YV12’), V4L2_PIX_FMT_YUV420 (‘YU12’)
+2.6.17. V4L2_PIX_FMT_YUV420M (‘YM12’), V4L2_PIX_FMT_YVU420M (‘YM21’)
+2.6.18. V4L2_PIX_FMT_YUV422M (‘YM16’), V4L2_PIX_FMT_YVU422M (‘YM61’)
+2.6.19. V4L2_PIX_FMT_YUV444M (‘YM24’), V4L2_PIX_FMT_YVU444M (‘YM42’)
+2.6.20. V4L2_PIX_FMT_YVU410 (‘YVU9’), V4L2_PIX_FMT_YUV410 (‘YUV9’)
+2.6.21. V4L2_PIX_FMT_YUV422P (‘422P’)
+2.6.22. V4L2_PIX_FMT_YUV411P (‘411P’)
+2.6.23. V4L2_PIX_FMT_NV12 (‘NV12’), V4L2_PIX_FMT_NV21 (‘NV21’)
+2.6.24. V4L2_PIX_FMT_NV12M (‘NM12’), V4L2_PIX_FMT_NV21M (‘NM21’), V4L2_PIX_FMT_NV12MT_16X16
+2.6.25. V4L2_PIX_FMT_NV12MT (‘TM12’)
+2.6.26. V4L2_PIX_FMT_NV16 (‘NV16’), V4L2_PIX_FMT_NV61 (‘NV61’)
+2.6.27. V4L2_PIX_FMT_NV16M (‘NM16’), V4L2_PIX_FMT_NV61M (‘NM61’)
+2.6.28. V4L2_PIX_FMT_NV24 (‘NV24’), V4L2_PIX_FMT_NV42 (‘NV42’)
+2.6.29. V4L2_PIX_FMT_M420 (‘M420’)
+*/
+struct V4L2DeviceParameters 
+{   /**
+     * @brief V4L2DeviceParameters
+     * @param devname Device name, e.g. "/dev/video0"
+     * @param formatList List of candidate frame formats
+     * @param width Requested output image width (may not take effect)
+     * @param height Requested output image height (may not take effect)
+     * @param fps Requested output frame rate (may not take effect)
+     * @param exp_mode Exposure mode
+     * @param exp_level Exposure level
+     * @param exp_abs Absolute exposure value
+     * @param input_index Input index when one device has several inputs; pass 0 for a single-input device
+     * @param verbose
+     * @param openFlags
+
+     */
+    V4L2DeviceParameters(const char* devname, const std::list<unsigned int> & formatList, unsigned int width, unsigned int height, int fps,
+                         __s32 exp_mode, int exp_level = 0, int exp_abs = 5,
+                         unsigned int input_index = 0, int verbose = 0, int openFlags = O_RDWR | O_NONBLOCK) :
+        m_devName(devname), m_inputIndex(input_index), m_formatList(formatList), m_width(width), m_height(height), m_fps(fps),
+        m_verbose(verbose), m_openFlags(openFlags),m_exp_mode(exp_mode),m_exp_level(exp_level),m_exp_abs(exp_abs) {}
+    /**
+     * @brief V4L2DeviceParameters
+     * @param devname
+     * @param format Frame format
+     * @param width Requested output image width (may not take effect)
+     * @param height Requested output image height (may not take effect)
+     * @param fps Requested output frame rate (may not take effect)
+     * @param exp_mode Exposure mode
+     * @param exp_level Exposure level
+     * @param exp_abs Absolute exposure value
+     * @param input_index Input index when one device has several inputs; pass 0 for a single-input device
+     * @param verbose
+     * @param openFlags
+     */
+    V4L2DeviceParameters(const char* devname, unsigned int format, unsigned int width, unsigned int height, int fps,
+                         __s32 exp_mode, int exp_level = 0, int exp_abs = 5,
+                         unsigned int input_index = 0, int verbose = 0, int openFlags = O_RDWR | O_NONBLOCK) :
+        m_devName(devname), m_inputIndex(input_index), m_width(width), m_height(height), m_fps(fps), m_verbose(verbose), m_openFlags(openFlags),
+        m_exp_mode(exp_mode),m_exp_level(exp_level),m_exp_abs(exp_abs){
+			if (format) {
+				m_formatList.push_back(format);
+			}
+	}
+		
+	std::string m_devName;
+    unsigned int m_inputIndex;
+	std::list<unsigned int> m_formatList;
+	unsigned int m_width;
+	unsigned int m_height;
+	int m_fps;			
+	int m_verbose;
+	int m_openFlags;
+	int m_exp_level;
+	int m_exp_abs;
+    __s32 m_exp_mode;
+
+};
+
+// ---------------------------------
+// V4L2 Device
+// ---------------------------------
+class V4l2Device
+{		
+	friend class V4l2Capture;
+	friend class V4l2Output;
+	
+	protected:	
+		void close();	
+	
+        int initdevice(const char *dev_name , unsigned int mandatoryCapabilities );
+		int checkCapabilities(int fd, unsigned int mandatoryCapabilities);
+		int configureFormat(int fd);
+		int configureFormat(int fd, unsigned int format, unsigned int width, unsigned int height);
+		int configureParam(int fd);
+		int configureExp(int fd);
+
+        virtual bool init(unsigned int mandatoryCapabilities);
+		virtual size_t writeInternal(char*, size_t) { return -1; }
+		virtual bool startPartialWrite(void)        { return false; }
+		virtual size_t writePartialInternal(char*, size_t) { return -1; }
+		virtual bool endPartialWrite(void)          { return false; }
+		virtual size_t readInternal(char*, size_t)  { return -1; }
+	
+	public:
+		V4l2Device(const V4L2DeviceParameters&  params, v4l2_buf_type deviceType);		
+		virtual ~V4l2Device();
+	
+		virtual bool isReady() { return (m_fd != -1); }
+		virtual bool start()   { return true; }
+		virtual bool stop()    { return true; }
+	
+		unsigned int getBufferSize() { return m_bufferSize; }
+		unsigned int getFormat()     { return m_format;     }
+		unsigned int getWidth()      { return m_width;      }
+		unsigned int getHeight()     { return m_height;     }
+        unsigned char *getBusInfo() { return  bus_info;    }
+		int getFd()         { return m_fd;         }
+		void queryFormat();	
+
+	protected:
+		V4L2DeviceParameters m_params;
+		int m_fd;
+		v4l2_buf_type m_deviceType;	
+	
+		unsigned int m_bufferSize;
+		unsigned int m_format;
+		unsigned int m_width;
+		unsigned int m_height;	
+
+		struct v4l2_buffer m_partialWriteBuf;
+		bool m_partialWriteInProgress;
+        unsigned char bus_info[32];
+};
+
+
+#endif

+ 50 - 0
include/v4l2_cv_mat/V4l2MmapDevice.h

@@ -0,0 +1,50 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2MmapDevice.h
+** 
+** V4L2 source using mmap API
+**
+** -------------------------------------------------------------------------*/
+
+
+#ifndef V4L2_MMAP_DEVICE
+#define V4L2_MMAP_DEVICE
+ 
+#include "V4l2Device.h"
+
+#define V4L2MMAP_NBBUFFER 10
+
+class V4l2MmapDevice : public V4l2Device
+{	
+	protected:	
+		size_t writeInternal(char* buffer, size_t bufferSize);
+		bool startPartialWrite(void);
+		size_t writePartialInternal(char*, size_t);
+		bool endPartialWrite(void);
+		size_t readInternal(char* buffer, size_t bufferSize);
+			
+	public:
+		V4l2MmapDevice(const V4L2DeviceParameters & params, v4l2_buf_type deviceType);		
+		virtual ~V4l2MmapDevice();
+
+		virtual bool init(unsigned int mandatoryiCapabilities);
+		virtual bool isReady() { return  ((m_fd != -1)&& (n_buffers != 0)); }
+		virtual bool start();
+		virtual bool stop();
+	
+	protected:
+		unsigned int  n_buffers;
+	
+		struct buffer 
+		{
+			void *                  start;
+			size_t                  length;
+		};
+		buffer m_buffer[V4L2MMAP_NBBUFFER];
+};
+
+#endif
+

+ 37 - 0
include/v4l2_cv_mat/V4l2Output.h

@@ -0,0 +1,37 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Output.h
+** 
+** V4L2 Output wrapper 
+**
+** -------------------------------------------------------------------------*/
+
+
+#ifndef V4L2_OUTPUT
+#define V4L2_OUTPUT
+
+#include "V4l2Access.h"
+
+// ---------------------------------
+// V4L2 Output
+// ---------------------------------
+class V4l2Output : public V4l2Access
+{		
+	protected:
+		V4l2Output(V4l2Device* device);
+
+	public:
+		static V4l2Output* create(const V4L2DeviceParameters & param, IoType iotype = V4l2Access::IOTYPE_MMAP);
+		~V4l2Output();
+	
+		size_t write(char* buffer, size_t bufferSize);
+		int    isWritable(timeval* tv);
+		bool   startPartialWrite(void);
+		size_t writePartial(char* buffer, size_t bufferSize);
+		bool   endPartialWrite(void);
+};
+
+#endif

+ 31 - 0
include/v4l2_cv_mat/V4l2ReadWriteDevice.h

@@ -0,0 +1,31 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2ReadWriteDevice.h
+** 
+** V4L2 source using read/write API
+**
+** -------------------------------------------------------------------------*/
+
+
+#ifndef V4L2_RW_DEVICE
+#define V4L2_RW_DEVICE
+ 
+#include "V4l2Device.h"
+
+
+class V4l2ReadWriteDevice : public V4l2Device
+{	
+	protected:	
+		virtual size_t writeInternal(char* buffer, size_t bufferSize);
+		virtual size_t readInternal(char* buffer, size_t bufferSize);
+		
+	public:
+		V4l2ReadWriteDevice(const V4L2DeviceParameters&  params, v4l2_buf_type deviceType);
+};
+
+
+#endif
+

+ 83 - 0
include/v4l2_cv_mat/logger.h

@@ -0,0 +1,83 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** logger.h
+** 
+** -------------------------------------------------------------------------*/
+
+#ifndef LOGGER_H
+#define LOGGER_H
+
+#include <unistd.h>
+
+#ifdef HAVE_LOG4CPP
+#include "log4cpp/Category.hh"
+#include "log4cpp/FileAppender.hh"
+#include "log4cpp/PatternLayout.hh"
+
+#define LOG(__level)  log4cpp::Category::getRoot() << log4cpp::Priority::__level << __FILE__ << ":" << __LINE__ << "\n\t" 
+
+inline void initLogger(int verbose)
+{
+	// initialize log4cpp
+	log4cpp::Category &log = log4cpp::Category::getRoot();
+	log4cpp::Appender *app = new log4cpp::FileAppender("root", fileno(stdout));
+	if (app)
+	{
+		log4cpp::PatternLayout *plt = new log4cpp::PatternLayout();
+		if (plt)
+		{
+			plt->setConversionPattern("%d [%-6p] - %m%n");
+			app->setLayout(plt);
+		}
+		log.addAppender(app);
+	}
+	switch (verbose)
+	{
+		case 2: log.setPriority(log4cpp::Priority::DEBUG); break;
+		case 1: log.setPriority(log4cpp::Priority::INFO); break;
+		default: log.setPriority(log4cpp::Priority::NOTICE); break;
+		
+	}
+	LOG(INFO) << "level:" << log4cpp::Priority::getPriorityName(log.getPriority()); 
+}
+#else
+
+typedef enum {EMERG  = 0,
+                      FATAL  = 0,
+                      ALERT  = 100,
+                      CRIT   = 200,
+                      ERROR  = 300,
+                      WARN   = 400,
+                      NOTICE = 500,
+                      INFO   = 600,
+                      DEBUG  = 700,
+                      NOTSET = 800
+} PriorityLevel;
+
+#include <iostream>
+extern int LogLevel;
+#define LOG(__level) if (__level<=LogLevel) std::cout << "\n[" << #__level << "] " << __FILE__ << ":" << __LINE__ << "\n\t"
+
+//#define LOG(__level) if (__level<=LogLevel) std::cout << "\n[" << #__level << "] " << __FILE__ << ":" << __LINE__ << "\n\t"
+//#define LOG(__level) if (__level<=LogLevel) std::cout<< "\n"
+
+inline void initLogger(int verbose)
+{
+        switch (verbose)
+        {
+                case 2: LogLevel=DEBUG; break;
+                case 1: LogLevel=INFO; break;
+                default: LogLevel=NOTICE; break;
+
+        }
+    std::cout << "log level:" << LogLevel << std::endl;
+}
+
+#endif // HAVE_LOG4CPP
+
+#endif // LOGGER_H

+ 134 - 0
launch/include/rviz.rviz

@@ -0,0 +1,134 @@
+Panels:
+  - Class: rviz/Displays
+    Help Height: 96
+    Name: Displays
+    Property Tree Widget:
+      Expanded:
+        - /Global Options1
+        - /Status1
+      Splitter Ratio: 0.5
+    Tree Height: 336
+  - Class: rviz/Selection
+    Name: Selection
+  - Class: rviz/Tool Properties
+    Expanded:
+      - /2D Pose Estimate1
+      - /2D Nav Goal1
+      - /Publish Point1
+    Name: Tool Properties
+    Splitter Ratio: 0.5886790156364441
+  - Class: rviz/Views
+    Expanded:
+      - /Current View1
+    Name: Views
+    Splitter Ratio: 0.5
+  - Class: rviz/Time
+    Experimental: false
+    Name: Time
+    SyncMode: 0
+    SyncSource: Image
+Preferences:
+  PromptSaveOnExit: true
+Toolbars:
+  toolButtonStyle: 2
+Visualization Manager:
+  Class: ""
+  Displays:
+    - Alpha: 0.5
+      Cell Size: 1
+      Class: rviz/Grid
+      Color: 160; 160; 164
+      Enabled: true
+      Line Style:
+        Line Width: 0.029999999329447746
+        Value: Lines
+      Name: Grid
+      Normal Cell Count: 0
+      Offset:
+        X: 0
+        Y: 0
+        Z: 0
+      Plane: XY
+      Plane Cell Count: 10
+      Reference Frame: <Fixed Frame>
+      Value: true
+    - Class: rviz/Image
+      Enabled: true
+      Image Topic: /video_image
+      Max Value: 1
+      Median window: 5
+      Min Value: 0
+      Name: Image
+      Normalize Range: true
+      Queue Size: 2
+      Transport Hint: raw
+      Unreliable: false
+      Value: true
+  Enabled: true
+  Global Options:
+    Background Color: 48; 48; 48
+    Default Light: true
+    Fixed Frame: map
+    Frame Rate: 30
+  Name: root
+  Tools:
+    - Class: rviz/Interact
+      Hide Inactive Objects: true
+    - Class: rviz/MoveCamera
+    - Class: rviz/Select
+    - Class: rviz/FocusCamera
+    - Class: rviz/Measure
+    - Class: rviz/SetInitialPose
+      Theta std deviation: 0.2617993950843811
+      Topic: /initialpose
+      X std deviation: 0.5
+      Y std deviation: 0.5
+    - Class: rviz/SetGoal
+      Topic: /move_base_simple/goal
+    - Class: rviz/PublishPoint
+      Single click: true
+      Topic: /clicked_point
+  Value: true
+  Views:
+    Current:
+      Class: rviz/Orbit
+      Distance: 10
+      Enable Stereo Rendering:
+        Stereo Eye Separation: 0.05999999865889549
+        Stereo Focal Distance: 1
+        Swap Stereo Eyes: false
+        Value: false
+      Field of View: 0.7853981852531433
+      Focal Point:
+        X: 0
+        Y: 0
+        Z: 0
+      Focal Shape Fixed Size: true
+      Focal Shape Size: 0.05000000074505806
+      Invert Z Axis: false
+      Name: Current View
+      Near Clip Distance: 0.009999999776482582
+      Pitch: 0.785398006439209
+      Target Frame: <Fixed Frame>
+      Yaw: 0.785398006439209
+    Saved: ~
+Window Geometry:
+  Displays:
+    collapsed: false
+  Height: 846
+  Hide Left Dock: false
+  Hide Right Dock: false
+  Image:
+    collapsed: false
+  QMainWindow State: 000000ff00000000fd00000004000000000000048c000002abfc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000007e00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073000000002e000001e70000010e00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d006100670065010000002e000002ab0000001e00ffffff000000010000010f000002abfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e000002ab000000d800fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b00000004afc0100000002fb0000000800540069006d00650100000000000004b00000028900fffffffb0000000800540069006d0065010000000000000450000000000000000000000020000002ab00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+  Selection:
+    collapsed: false
+  Time:
+    collapsed: false
+  Tool Properties:
+    collapsed: false
+  Views:
+    collapsed: false
+  Width: 1200
+  X: 2127
+  Y: 82

+ 8 - 0
launch/run.launch

@@ -0,0 +1,8 @@
+<launch>
+
+    <node pkg="top_mark" type="top_mark_node" name="camera_node" output="screen">
+        <rosparam file="$(find top_mark)/config/param720.yaml" command="load" />
+    </node>
+    <node pkg="rviz" type="rviz" name="rviz" args="-d $(find top_mark)/launch/include/rviz.rviz" />
+
+</launch>
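
Once the package is built, something like `roslaunch top_mark run.launch` should bring this up: top_mark_node is started as camera_node with the 720p calibration from config/param720.yaml loaded into its private namespace, and rviz opens with the bundled config, whose Image display is set to the /video_image topic.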

+ 18 - 0
msg/.vscode/c_cpp_properties.json

@@ -0,0 +1,18 @@
+{
+  "configurations": [
+    {
+      "browse": {
+        "databaseFilename": "",
+        "limitSymbolsToIncludedHeaders": true
+      },
+      "includePath": [
+        "/home/lenotary/work/code/top_mark_ws/devel/include/**",
+        "/opt/ros/noetic/include/**",
+        "/home/lenotary/work/code/top_mark_ws/src/top_mark/include/**",
+        "/usr/include/**"
+      ],
+      "name": "ROS"
+    }
+  ],
+  "version": 4
+}

+ 10 - 0
msg/.vscode/settings.json

@@ -0,0 +1,10 @@
+{
+    "python.autoComplete.extraPaths": [
+        "/home/lenotary/work/code/top_mark_ws/devel/lib/python3/dist-packages",
+        "/opt/ros/noetic/lib/python3/dist-packages"
+    ],
+    "python.analysis.extraPaths": [
+        "/home/lenotary/work/code/top_mark_ws/devel/lib/python3/dist-packages",
+        "/opt/ros/noetic/lib/python3/dist-packages"
+    ]
+}

+ 2 - 0
msg/PoseArray.msg

@@ -0,0 +1,2 @@
+int32[] ids
+geometry_msgs/PoseStamped[] poses
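
A minimal publisher sketch (not part of this commit) for this custom message. The include path follows the usual catkin message_generation convention, and the topic, frame and id values are purely illustrative.

    #include <ros/ros.h>
    #include <geometry_msgs/PoseStamped.h>
    #include <top_mark/PoseArray.h>                     // generated from msg/PoseArray.msg

    int main(int argc, char** argv) {
        ros::init(argc, argv, "pose_array_demo");       // illustrative node name
        ros::NodeHandle nh;
        ros::Publisher pub = nh.advertise<top_mark::PoseArray>("mark_poses", 1);  // illustrative topic

        top_mark::PoseArray msg;
        msg.ids.push_back(3);                           // one detected marker id
        geometry_msgs::PoseStamped pose;
        pose.header.stamp = ros::Time::now();
        pose.header.frame_id = "camera";                // illustrative frame
        pose.pose.orientation.w = 1.0;                  // identity orientation
        msg.poses.push_back(pose);

        ros::Rate rate(10);
        while (ros::ok()) {
            pub.publish(msg);
            rate.sleep();
        }
        return 0;
    }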

+ 46 - 0
ost.txt

@@ -0,0 +1,46 @@
+# oST version 5.0 parameters
+
+
+[image]
+
+width
+1920
+
+height
+1080
+
+[narrow_stereo]
+
+camera matrix
+879.128576 0.000000 949.931895
+0.000000 879.140252 502.593617
+0.000000 0.000000 1.000000
+
+distortion
+-0.009589 -0.006460 -0.003711 0.000157 0.000000
+
+rectification
+1.000000 0.000000 0.000000
+0.000000 1.000000 0.000000
+0.000000 0.000000 1.000000
+
+projection
+859.813171 0.000000 949.283941 0.000000
+0.000000 873.249756 497.956650 0.000000
+0.000000 0.000000 1.000000 0.000000
+
+
+[ INFO][2021-11-25 08:36:26 960672] [/camera_node] line:46  pp:  point [897.281046 , 536.470588]
+[ INFO][2021-11-25 08:36:26 961622] [/camera_node] line:46  pp:  point [841.635659 , 546.348837]
+[ INFO][2021-11-25 08:36:26 961647] [/camera_node] line:46  pp:  point [931.635135 , 558.432432]
+[ INFO][2021-11-25 08:36:26 961661] [/camera_node] line:46  pp:  point [943.522013 , 620.251572]
+[ INFO][2021-11-25 08:36:26 961672] [/camera_node] line:46  pp:  point [916.062893 , 625.779874]
+[ INFO][2021-11-25 08:36:26 961685] [/camera_node] line:46  pp:  point [858.679012 , 638.419753]
+
+
+[ INFO][2021-11-25 08:37:58 915604] [/camera_node] line:46  pp:  point [858.676086 , 638.428711]
+[ INFO][2021-11-25 08:37:58 916374] [/camera_node] line:46  pp:  point [916.084229 , 625.785828]
+[ INFO][2021-11-25 08:37:58 916393] [/camera_node] line:46  pp:  point [943.525208 , 620.268555]
+[ INFO][2021-11-25 08:37:58 916406] [/camera_node] line:46  pp:  point [931.643433 , 558.426331]
+[ INFO][2021-11-25 08:37:58 916418] [/camera_node] line:46  pp:  point [841.665161 , 546.358948]
+[ INFO][2021-11-25 08:37:58 916430] [/camera_node] line:46  pp:  point [897.303284 , 536.466187]

+ 27 - 0
ost.yaml

@@ -0,0 +1,27 @@
+image_width: 1920
+image_height: 1080
+camera_name: narrow_stereo
+
+camera_matrix:
+  rows: 3
+  cols: 3
+  data: [879.12858,   0.     , 949.9319 ,
+           0.     , 879.14025, 502.59362,
+           0.     ,   0.     ,   1.     ]
+distortion_model: plumb_bob
+distortion_coefficients:
+  rows: 1
+  cols: 5
+  data: [-0.009589, -0.006460, -0.003711, 0.000157, 0.000000]
+rectification_matrix:
+  rows: 3
+  cols: 3
+  data: [1., 0., 0.,
+         0., 1., 0.,
+         0., 0., 1.]
+projection_matrix:
+  rows: 3
+  cols: 4
+  data: [859.81317,   0.     , 949.28394,   0.     ,
+           0.     , 873.24976, 497.95665,   0.     ,
+           0.     ,   0.     ,   1.     ,   0.     ]

+ 80 - 0
package.xml

@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>top_mark</name>
+  <version>0.0.0</version>
+  <description>The top_mark package</description>
+
+  <!-- One maintainer tag required, multiple allowed, one person per tag -->
+  <!-- Example:  -->
+  <!-- <maintainer email="jane.doe@example.com">Jane Doe</maintainer> -->
+  <maintainer email="lenotary@todo.todo">lenotary</maintainer>
+
+
+  <!-- One license tag required, multiple allowed, one license per tag -->
+  <!-- Commonly used license strings: -->
+  <!--   BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
+  <license>TODO</license>
+
+
+  <!-- Url tags are optional, but multiple are allowed, one per tag -->
+  <!-- Optional attribute type can be: website, bugtracker, or repository -->
+  <!-- Example: -->
+  <!-- <url type="website">http://wiki.ros.org/top_mark</url> -->
+
+
+  <!-- Author tags are optional, multiple are allowed, one per tag -->
+  <!-- Authors do not have to be maintainers, but could be -->
+  <!-- Example: -->
+  <!-- <author email="jane.doe@example.com">Jane Doe</author> -->
+
+
+  <!-- The *depend tags are used to specify dependencies -->
+  <!-- Dependencies can be catkin packages or system dependencies -->
+  <!-- Examples: -->
+  <!-- Use depend as a shortcut for packages that are both build and exec dependencies -->
+  <!--   <depend>roscpp</depend> -->
+  <!--   Note that this is equivalent to the following: -->
+  <!--   <build_depend>roscpp</build_depend> -->
+  <!--   <exec_depend>roscpp</exec_depend> -->
+  <!-- Use build_depend for packages you need at compile time: -->
+  <!--   <build_depend>message_generation</build_depend> -->
+  <!-- Use build_export_depend for packages you need in order to build against this package: -->
+  <!--   <build_export_depend>message_generation</build_export_depend> -->
+  <!-- Use buildtool_depend for build tool packages: -->
+  <!--   <buildtool_depend>catkin</buildtool_depend> -->
+  <!-- Use exec_depend for packages you need at runtime: -->
+  <!--   <exec_depend>message_runtime</exec_depend> -->
+  <!-- Use test_depend for packages you need only for testing: -->
+  <!--   <test_depend>gtest</test_depend> -->
+  <!-- Use doc_depend for packages you need only for building documentation: -->
+  <!--   <doc_depend>doxygen</doc_depend> -->
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>nav_msgs</build_depend>
+  <build_depend>roscpp</build_depend>
+  <build_depend>rospy</build_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <build_depend>std_msgs</build_depend>
+  <build_depend>message_generation</build_depend>
+  <build_depend>message_runtime</build_depend>
+  <build_export_depend>nav_msgs</build_export_depend>
+  <build_export_depend>roscpp</build_export_depend>
+  <build_export_depend>rospy</build_export_depend>
+  <build_export_depend>sensor_msgs</build_export_depend>
+  <build_export_depend>std_msgs</build_export_depend>
+  <build_export_depend>message_generation</build_export_depend>
+  <build_export_depend>message_runtime</build_export_depend>
+  <exec_depend>nav_msgs</exec_depend>
+  <exec_depend>roscpp</exec_depend>
+  <exec_depend>rospy</exec_depend>
+  <exec_depend>sensor_msgs</exec_depend>
+  <exec_depend>std_msgs</exec_depend>
+  <exec_depend>message_generation</exec_depend>
+  <exec_depend>message_runtime</exec_depend>
+
+
+  <!-- The export tag contains other, unspecified, tags -->
+  <export>
+    <!-- Other tools can request additional information be placed here -->
+
+  </export>
+</package>

+ 12 - 0
script/camera_read.py

@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import cv2
+
+cap = cv2.VideoCapture(0)
+
+while True:
+
+    success, frame = cap.read()
+    cv2.imshow("aa", frame)
+    if (cv2.waitKey(1) & 0xFF == ord('q')):
+        break

BIN
script/chessboard.bmp


BIN
script/chessboard.docx


+ 91 - 0
script/generateChessboard.py

@@ -0,0 +1,91 @@
+# -*- coding: utf-8  -*-
+import io
+import os
+import sys
+
+from docx import Document
+from docx.shared import Inches, Cm
+import cv2
+import numpy as np
+
+scale = 37.79527559055118  # px-per-cm ratio in Word (don't ask why, measure it yourself)
+
+
+def main(param):
+    docx_name, chessboard_cm, width_cm, height_cm = param
+    chessboard_pixel = chessboard_cm*scale
+    width = round(width_cm / chessboard_cm)
+    height = round(height_cm / chessboard_cm)
+    # width_pix = (width + 1) * chessboard_pixel
+    # height_pix = (height + 1) * chessboard_pixel
+    width_pix = round(width_cm * scale)
+    height_pix = round(height_cm * scale)
+
+    size = width_cm/height_cm
+
+    image = np.zeros((height_pix, width_pix, 3), dtype=np.uint8)
+    image.fill(255)
+    color = (255, 255, 255)
+    fill_color = 0
+    for j in range(0, height + 1):
+        y = round(j * chessboard_pixel)
+        for i in range(0, width + 1):
+            x0 = round(i * chessboard_pixel)
+            y0 = y
+            rect_start = (x0, y0)
+
+            x1 = round(x0 + chessboard_pixel)
+            y1 = round(y0 + chessboard_pixel)
+            rect_end = (x1, y1)
+            cv2.rectangle(image, rect_start, rect_end, color, 1, 0)
+            image[y0:y1, x0:x1] = fill_color
+            if width % 2:
+                if i != width:
+                    fill_color = (0 if (fill_color == 255) else 255)
+            else:
+                if i != width + 1:
+                    fill_color = (0 if (fill_color == 255) else 255)
+    bottom = round(width_pix/size)
+    if bottom < height_pix:
+        image = image[0:bottom, :, :]
+
+    # create a display window
+    win_name = "chessboard"
+    # cv.namedWindow(win_name, cv.WINDOW_NORMAL)
+    cv2.imwrite(win_name + ".bmp", image)
+    # cv.imshow(win_name, image)
+    # cv.waitKey()
+
+    doc = Document()  # create a document object from the default template
+    distance = Inches(0)
+    sec = doc.sections[0]
+    sec.left_margin = distance  # set the left, right, top and bottom page margins in turn
+    sec.right_margin = distance
+    sec.top_margin = distance
+    sec.bottom_margin = distance
+
+    sec.page_width = Cm(width_cm)  # set the page width
+    sec.page_height = Cm(height_cm)  # set the page height
+
+    img_encode = cv2.imencode('.bmp', image)[1]
+    str_encode = img_encode .tostring()
+    cc = io.BytesIO(str_encode)
+    # img = doc.add_picture(cc, Cm(42.01))
+    doc.add_picture(cc)
+    # doc.add_picture(win_name + ".bmp")
+    doc.save(docx_name)       # save the document with the embedded image
+
+
+if __name__ == '__main__':
+    param = ['chessboard.docx', 4, 42, 29.7]  # document name, square size (cm), page width (cm), page height (cm)
+    length = len(sys.argv)
+    if length > 1:
+        for idx in range(1, 5 if (length > 5) else length):
+            param[idx - 1] = sys.argv[idx] if idx == 1 else float(sys.argv[idx])  # argv values are strings; convert the numeric ones
+        strlist = os.path.splitext(str(param[0]))
+        if len(strlist) == 1:
+            param[0] = str(param[0]) + '.docx'
+        elif strlist[-1] != 'docx':
+            param[0] = ''.join(strlist[0:-1]) + '.docx'
+        # print(param)
+    main(param)
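
An assumed invocation (not shown in the commit) would be `python3 generateChessboard.py chessboard.docx 4 42 29.7`: output document name, square size in cm, page width in cm and page height in cm, matching the hard-coded defaults above. The script writes both chessboard.bmp and the .docx with the page sized to the requested dimensions.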

+ 19 - 0
src/.vscode/c_cpp_properties.json

@@ -0,0 +1,19 @@
+{
+  "configurations": [
+    {
+      "browse": {
+        "databaseFilename": "",
+        "limitSymbolsToIncludedHeaders": true
+      },
+      "includePath": [
+        "/home/lenotary/work/code/top_mark_ws/devel/include/**",
+        "/opt/ros/noetic/include/**",
+        "/home/lenotary/work/code/top_mark_ws/src/top_mark/include/**",
+        "/home/lenotary/work/code/top_mark_ws/src/zed_driver/include/**",
+        "/usr/include/**"
+      ],
+      "name": "ROS"
+    }
+  ],
+  "version": 4
+}

+ 6 - 0
src/.vscode/settings.json

@@ -0,0 +1,6 @@
+{
+    "python.autoComplete.extraPaths": [
+        "/home/lenotary/work/code/top_mark_ws/devel/lib/python3/dist-packages",
+        "/opt/ros/noetic/lib/python3/dist-packages"
+    ]
+}

+ 2 - 0
src/camera.cpp

@@ -0,0 +1,2 @@
+#include "camera.h"
+

+ 29 - 0
src/camera.h

@@ -0,0 +1,29 @@
+/**
+ * \file camera.h
+ * \brief Brief description
+ * \details Detailed description
+ * \author lenotary
+ * \version 1.0
+ * \date 2021/11/22
+ * \pre Preconditions (add only if any).
+ * \bug Known bugs (add only if any; "none found yet" is fine).
+ * \warning Warnings (add only if any).
+ * \copyright Copyright notice (usually just the license; details belong in the LICENSE file and must not contradict it).
+ * \since History notes (add only if any).
+ */
+
+
+//
+// Created by lenotary on 2021/11/22.
+//
+
+#ifndef TOP_MARK_CAMERA_H
+#define TOP_MARK_CAMERA_H
+
+#include <ros/ros.h>
+#include <sensor_msgs/Image.h>
+class Camera{
+public:
+
+};
+#endif //TOP_MARK_CAMERA_H

+ 21 - 0
src/gtsam_test.cpp

@@ -0,0 +1,21 @@
+/**
+ * @file Pose3SLAMExample_initializePose3.cpp
+ * @brief A 3D Pose SLAM example that reads input from g2o, and initializes the Pose3 using InitializePose3
+ * Syntax for the script is ./Pose3SLAMExample_changeKeys input.g2o rewritten.g2o
+ * @date Aug 25, 2014
+ * @author Luca Carlone
+ */
+
+#include <gtsam/slam/dataset.h>
+#include <gtsam/slam/BetweenFactor.h>
+#include <gtsam/slam/PriorFactor.h>
+#include <fstream>
+
+using namespace std;
+using namespace gtsam;
+
+int main(const int argc, const char *argv[]) {
+
+
+    return 0;
+}

+ 510 - 0
src/mark_extract.cpp

@@ -0,0 +1,510 @@
+/**
+ * \file mark_extract.cpp
+ * \brief Brief description
+ * \details Detailed description
+ * \author lenotary
+ * \version 1.0
+ * \date 2021/11/24
+ * \pre Preconditions (add only if any).
+ * \bug Known bugs (add only if any; "none found yet" is fine).
+ * \warning Warnings (add only if any).
+ * \copyright Copyright notice (usually just the license; details belong in the LICENSE file and must not contradict it).
+ * \since History notes (add only if any).
+ */
+
+//
+// Created by lenotary on 2021/11/24.
+//
+
+#include "mark_extract.h"
+
+MarkExtract::MarkExtract(ros::NodeHandle &nh, ros::NodeHandle &nhPrivate) : m_iNh_(nh), m_iNhPrivate_(nhPrivate) {
+
+    nhPrivate.getParam("image_width", m_nWidth_);
+    nhPrivate.getParam("image_height", m_nHeight_);
+    nhPrivate.getParam("threshold", m_nThresh_);
+    nhPrivate.getParam("point_diameter", m_dPointSize_);
+    nhPrivate.getParam("point_distance", m_dPointDistance_);
+
+    m_dMinPointArea_ = std::pow(m_dPointSize_ / 2,2) * M_PI / 2  * 15 * m_nWidth_;
+    m_dMinPointWidth_ = m_dPointSize_ * m_nWidth_ * 0.26;
+    ROS_INFO("min %f %f",m_dMinPointArea_,m_dMinPointWidth_);
+    XmlRpc::XmlRpcValue scanner_params;
+    std::vector<double> K, D;
+    nhPrivate.param<std::vector<double>>("camera_matrix/data", K, std::vector<double>());
+    nhPrivate.param<std::vector<double>>("distortion_coefficients/data", D, std::vector<double>());
+    m_mK_ = (cv::Mat_<double>(3, 3) << K[0], K[1], K[2], K[3], K[4], K[5], K[6], K[7], K[8]);
+    m_mD_ = (cv::Mat_<double>(5, 1) << D[0], D[1], D[2], D[3], D[4]);
+    const double alpha = 1;
+    cv::Size imageSize(m_nWidth_, m_nHeight_);
+    m_mNewCameraMatrix_ = getOptimalNewCameraMatrix(m_mK_, m_mD_, imageSize, alpha, imageSize, 0);
+
+//    cv::Mat UndistortImage;
+//    cv::undistort(RawImage, UndistortImage, m_mK_, m_mD_, m_mK_);
+//        cv::undistort(RawImage, UndistortImage, K, D, NewCameraMatrix);
+
+}
+
+void MarkExtract::process(cv::Mat &image) {
+    cv::Size imageSize(m_nWidth_, m_nHeight_);
+    cv::undistort(image, m_mDistortionImage, m_mK_, m_mD_, m_mK_);
+//        cv::undistort(image, UndistortImage, K, D, NewCameraMatrix);
+    cv::Mat gray, binaryImage;
+    cv::cvtColor(m_mDistortionImage, gray, cv::COLOR_RGB2GRAY);   // convert to grayscale before thresholding to a binary image
+    threshold(gray, m_mBinary, m_nThresh_, 255, cv::THRESH_BINARY);
+//    int s = 3 * 2 + 1;
+//    cv::Mat structureElement = getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(s, s), cv::Point(-1, -1));      // create the structuring element
+//    dilate(m_mBinary, m_mBinary, structureElement, cv::Point(-1, -1), 1);               // apply dilation
+
+// new method: get the centers of connected components
+    cv::Mat outImage, stats, centroids;
+    int count = connectedComponentsWithStats(m_mBinary, outImage, stats, centroids, 8, CV_16U);
+//    cv::RNG rng(time(nullptr));
+//    std::vector<cv::Vec3b> colors;
+//    for (int i = 0; i < count; i++) {
+//        cv::Vec3b vec3 = cv::Vec3b(rng.uniform(0, 256), rng.uniform(0, 256), rng.uniform(0, 256));
+//        colors.push_back(vec3);
+//    }
+    cv::Mat dst = cv::Mat::zeros(gray.size(), gray.type());
+    int width = dst.cols;
+    int height = dst.rows;
+    std::vector<cv::Point2d> mc;
+    for (int i = 1; i < count; i++) {
+        // find the centroid of the connected component
+        cv::Point2d center;
+        center.x = centroids.at<double>(i, 0);
+        center.y = centroids.at<double>(i, 1);
+        // bounding-box position and size
+        int x = stats.at<int>(i, cv::CC_STAT_LEFT);
+        int y = stats.at<int>(i, cv::CC_STAT_TOP);
+        int w = stats.at<int>(i, cv::CC_STAT_WIDTH);
+        int h = stats.at<int>(i, cv::CC_STAT_HEIGHT);
+        int area = stats.at<int>(i, cv::CC_STAT_AREA) / 10;
+//        ROS_INFO("info  i %d  : %d %d %d %d %d ",i,w,h,x,y,area);
+//        ROS_INFO("cc %f", 5 / 0.015/ m_nWidth_);
+
+        if (std::abs(w - h) < 5 and w > m_dMinPointWidth_ and area > m_dMinPointArea_) {   // 5 /  std::pow(m_dPointSize_ / 2,2) * M_PI / 2 * m_nWidth_ / m_nWidth_
+
+            mc.push_back(center);
+        }
+
+
+        // draw the centroid
+//        circle(image, center, 1, cv::Scalar(0, 255, 122), 2, 8, 0);
+        // bounding rectangle
+//        cv::Rect rect(x, y, w, h);
+//        rectangle(image, rect, colors[i], 1, 8, 0);
+        //putText(image, format("%d", i), Point(center_x, center_y), FONT_HERSHEY_COMPLEX, 0.5, Scalar(0, 0, 255), 1);
+    }
+//    ROS_INFO("size of center %ld",mc.size());
+//    for (auto p: mc) {
+//        printPoint(p, "pp");
+//    }
+
+// Original approach (kept for reference): obtain centroids from contour moments
+/*    std::vector<std::vector<cv::Point> > contours;   // nested vector of contour points
+    std::vector<cv::Vec4i> hierarchy;                  // each Vec4i element holds four ints
+
+
+    findContours(binaryImage.clone(), contours, hierarchy, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE, cv::Point(0, 0));
+    std::vector<cv::Moments> mu(contours.size());
+    for (int i = 0; i < contours.size(); i++) {
+        mu[i] = moments(contours[i], false);
+    }
+    ///  compute the centroids from the moments:
+    std::vector<cv::Point2f> mc;
+    for (int i = 0; i < contours.size(); i++) {
+
+        double x = mu[i].m10 / mu[i].m00;
+        double y = mu[i].m01 / mu[i].m00;
+        if (x > 0 and x < 10000 and y > 0 and y < 10000) {
+            mc.emplace_back(mu[i].m10 / mu[i].m00, mu[i].m01 / mu[i].m00);
+        }
+    }
+    if(mc.size() > 16){
+        return;
+    }*/
+
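+    // Cluster the candidate centroids with DBSCAN (Eps = 200 px, MinPts = 3) so that several
+    // markers in one frame can be separated, drop sparse outliers within each cluster, then
+    // decode an ID and pose per cluster; a cluster with fewer than 5 surviving points aborts
+    // processing of this frame.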
+    auto allPoints = DBSCAN(mc, 200, 3);
+    m_vIds.clear();
+    m_vTS.clear();
+    m_vRS.clear();
+
+    for (auto map_iter : allPoints) {
+        cv::Mat r,t;
+        auto points = removeOutlier(map_iter.second,500,3);
+        
+        if(points.size() < 5){
+            return;
+        }
+
+        auto id = getIdAndPose(points,r,t);
+        m_vIds.push_back(id);
+        m_vTS.push_back(t);
+        m_vRS.push_back(r);
+    }
+
+//    ROS_INFO("id %d",cc);
+//    cv::imshow("aa", m_mBinary_);
+//    cv::waitKey(10);
+    /// 绘制轮廓
+//    cv::Mat drawing = cv::Mat::zeros(binaryImage.size(), CV_8UC1);
+//    for (int i = 0; i < cc.size(); i++) {
+//        cv::Scalar color = cv::Scalar(255);
+////        drawContours(drawing, contours, i, color, 2, 8, hierarchy, 0, cv::Point());
+//        circle(drawing, cc[i], 4, color, -1, 8, 0);
+//    }
+
+}
+
+int MarkExtract::getIdAndPose(std::vector<cv::Point2d> &mc,cv::Mat &r,cv::Mat &t) {
+    bool getTruePoint{false};
+    std::vector<PointDisSet> pointDisList;
+    std::vector<std::vector<cv::Point2d>> allPoints;
+
+    cv::Point2d A, B, C, D, E;
+
+    PointDisSet pointDis;
+    cv::Point2d image_center(0,0);
+
+    std::vector<cv::Point2d> cameraPoints;
+    for (auto p: mc) {
+        auto cameraP = pixToCamera(p);
+        cameraPoints.push_back(cameraP);
+    }
+
+    for (int i = 0; i < cameraPoints.size(); ++i) {
+        for (int j = 0; j < cameraPoints.size(); ++j) {
+            auto point1 = cameraPoints[i];
+            auto point2 = cameraPoints[j];
+//            auto dis = std::hypot(point1.x - point2.x, point1.y - point2.y);
+            auto dis = distanceTwoPoint(point1,point2);
+            pointDisList.emplace_back(dis,point1,point2);
+        }
+    }
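+    // Corner search (assumes the marker is a 4x4 grid of reflective points with three corner
+    // anchors): take the farthest pair of points as the two axis endpoints A and B, take the
+    // point farthest from their midpoint as the grid origin E, order A and B by their angle
+    // about E so the axes stay consistent, then hypothesize the 4x4 grid from E with steps
+    // (B - E) / 3 and (A - E) / 3 and verify it by locating the anchor cells C and D.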
+    int count = 1;
+    while (not getTruePoint and count < pointDisList.size()){
+        count ++;
+
+        double maxDis{0};
+        for(const auto& twoPoint: pointDisList){
+            if(twoPoint.mDis > maxDis){
+                maxDis = twoPoint.mDis;
+                pointDis = twoPoint;
+            }
+        }
+
+        for(auto& twoPoint: pointDisList){
+            if(twoPoint.mDis == maxDis){
+                twoPoint.mDis = 0;
+            }
+        }
+        cv::Point2d center, origin;
+        maxDis = 0;
+        center.x = (pointDis.mP1.x + pointDis.mP2.x) / 2.0;
+        center.y = (pointDis.mP1.y + pointDis.mP2.y) / 2.0;
+        for (const auto &p: cameraPoints) {
+            auto dis = distanceTwoPoint(p,center);
+//            auto dis = std::hypot(p.x - center.x, p.y - center.y);
+            if (dis > maxDis and p!=pointDis.mP1 and p !=pointDis.mP2) {
+                origin = p;
+                maxDis = dis;
+            }
+        }
+        std::vector<cv::Point2d> out;
+
+        double angle1 = std::atan2(pointDis.mP1.y - origin.y, pointDis.mP1.x - origin.x) * 180 / 3.1415926;
+        double angle2 = std::atan2(pointDis.mP2.y - origin.y, pointDis.mP2.x - origin.x) * 180 / 3.1415926;
+        E = origin;
+        if(angle1 * angle2 > 0){
+            if (angle1 > angle2) {
+                A = pointDis.mP1;
+                B = pointDis.mP2;
+            } else {
+                A = pointDis.mP2;
+                B = pointDis.mP1;
+            }
+        } else {
+            if(angle1 > angle2) {
+                if(angle1 - angle2 < 180){
+                    A = pointDis.mP1;
+                    B = pointDis.mP2;
+                }else{
+                    A = pointDis.mP2;
+                    B = pointDis.mP1;
+                }
+            }else if(angle2 > angle1 ){
+                if(angle2 - angle1 < 180){
+                    A = pointDis.mP2;
+                    B = pointDis.mP1;
+                }else{
+                    A = pointDis.mP1;
+                    B = pointDis.mP2;
+                }
+            }
+        }
+
+
+
+        double x_length_x = (B.x - E.x) / 3.0;
+        double x_length_y = (B.y - E.y) / 3.0;
+        double y_length_x = (A.x - E.x) / 3.0;
+        double y_length_y = (A.y - E.y) / 3.0;
+        for (int i = 0; i < 4; ++i) {
+            std::vector<cv::Point2d> colPoints;
+            for (int j = 0; j < 4; ++j) {
+                cv::Point2d p;
+                p.x = E.x + x_length_x * i + y_length_x * j;
+                p.y = E.y + x_length_y * i + y_length_y * j;
+//            out.push_back(cameraToPix(p));
+                colPoints.push_back(p);
+            }
+            allPoints.push_back(colPoints);
+        }
+        C = allPoints[2][3];
+        D = allPoints[3][2];
+
+        C = getRecentPoint(A,B,C,cameraPoints);
+        D = getRecentPoint(A,B,D,cameraPoints);
+        if(C.x == -100000 or D.x == -100000 or C == D){
+            getTruePoint = false;
+        }else{
+            getTruePoint = true;
+
+        }
+
+        auto cPix = cameraToPix(C);
+        auto dPix = cameraToPix(D);
+    }
+    if(not getTruePoint){
+        return -1;
+    }
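+    // Decode a 10-bit ID from the remaining grid cells: each bit is set when a detected point
+    // lies close enough to the predicted cell position (the cell-to-bit mapping below reflects
+    // the assumed marker layout).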
+    int id{0};
+    id += (getRecentPoint(A,B,allPoints[1][0],cameraPoints).x > -100000) << 0;
+    id += (getRecentPoint(A,B,allPoints[2][0],cameraPoints).x > -100000) << 1;
+    id += (getRecentPoint(A,B,allPoints[0][1],cameraPoints).x > -100000) << 2;
+    id += (getRecentPoint(A,B,allPoints[1][1],cameraPoints).x > -100000) << 3;
+    id += (getRecentPoint(A,B,allPoints[2][1],cameraPoints).x > -100000) << 4;
+    id += (getRecentPoint(A,B,allPoints[3][1],cameraPoints).x > -100000) << 5;
+    id += (getRecentPoint(A,B,allPoints[0][2],cameraPoints).x > -100000) << 6;
+    id += (getRecentPoint(A,B,allPoints[1][2],cameraPoints).x > -100000) << 7;
+    id += (getRecentPoint(A,B,allPoints[2][2],cameraPoints).x > -100000) << 8;
+    id += (getRecentPoint(A,B,allPoints[1][3],cameraPoints).x > -100000) << 9;
+
+
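+    // Pose estimation: match the five anchor corners (E, B, A, C, D) in the undistorted image
+    // to their known planar positions (spacing m_dPointDistance_) and run solvePnP with zero
+    // distortion, since the image has already been undistorted; rvecs/tvecs give the marker
+    // pose in the camera frame.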
+    std::vector<cv::Point2d> imagePoints;
+    imagePoints.push_back(cameraToPix(E));
+    imagePoints.push_back(cameraToPix(B));
+    imagePoints.push_back(cameraToPix(A));
+    imagePoints.push_back(cameraToPix(C));
+    imagePoints.push_back(cameraToPix(D));
+    std::vector<cv::Point3d> objectPoints;
+    objectPoints.emplace_back(0.0, 0.0, 0.0);
+    objectPoints.emplace_back(cv::Point3d(m_dPointDistance_ * 3, 0.0, 0.0));
+    objectPoints.emplace_back(cv::Point3d(0.0, m_dPointDistance_ * 3, 0.0));
+    objectPoints.emplace_back(cv::Point3d(m_dPointDistance_ * 2 , m_dPointDistance_ * 3, 0.0));
+    objectPoints.emplace_back(cv::Point3d(m_dPointDistance_ * 3, m_dPointDistance_ * 2, 0.0));
+//    objectPoints.push_back(cv::Point3d(0.0,0.0,0.15));
+    cv::Mat distCoeffs = (cv::Mat_<double>(5, 1) << 0, 0, 0, 0, 0);
+    cv::Mat rvecs = cv::Mat::zeros(3, 1, CV_64FC1);
+    cv::Mat tvecs = cv::Mat::zeros(3, 1, CV_64FC1);
+    cv::solvePnP(objectPoints, imagePoints, m_mK_, distCoeffs, rvecs, tvecs, false);
+
+    // for visualizing the estimated pose
+
+    std::vector<cv::Point2d> reference_ImagePoint;
+    cv::projectPoints(objectPoints, rvecs, tvecs, m_mK_, distCoeffs, reference_ImagePoint);
+    m_mR = rvecs;
+    m_mT = tvecs;
+    cv::line(m_mDistortionImage, reference_ImagePoint[0], reference_ImagePoint[1], cv::Scalar(0, 0, 255));
+    cv::line(m_mDistortionImage, reference_ImagePoint[0], reference_ImagePoint[2], cv::Scalar(0, 255, 0));
+    if (reference_ImagePoint.size() > 5)  // Z axis: requires the optional 6th object point above, otherwise this index is out of range
+        cv::line(m_mDistortionImage, reference_ImagePoint[0], reference_ImagePoint[5], cv::Scalar(255, 0, 0));
+    ROS_INFO("rvecs %f %f %f",rvecs.at<double>(0),rvecs.at<double>(1),rvecs.at<double>(2));
+//    ROS_INFO("tvecs %f %f %f",tvecs.at<double>(0),tvecs.at<double>(1),tvecs.at<double>(2));
+    for (auto & i : mc) {
+        cv::Scalar color = cv::Scalar(255, 255, 0);
+        circle(m_mDistortionImage, i, 4, color, -1, 8, 0);
+    }
+    cv::Scalar color_0 = cv::Scalar(255, 255, 255);
+    cv::Scalar color_a = cv::Scalar(0, 0, 255);
+    cv::Scalar color_b = cv::Scalar(0, 125, 255);
+    cv::Scalar color_c = cv::Scalar(255, 0, 0);
+    cv::Scalar color_d = cv::Scalar(255, 255, 0);
+    cv::Scalar color_e= cv::Scalar(0, 255, 255);
+    circle(m_mDistortionImage, imagePoints[0], 8, color_e, -1, 8, 0);
+    circle(m_mDistortionImage, imagePoints[1], 8, color_b, -1, 8, 0);
+    circle(m_mDistortionImage, imagePoints[2], 8, color_a, -1, 8, 0);
+    circle(m_mDistortionImage, imagePoints[3], 8, color_c, -1, 8, 0);
+    circle(m_mDistortionImage, imagePoints[4], 8, color_d, -1, 8, 0);
+//    circle(m_mDistortionImage_, cameraToPix(image_center), 8, color_0, -1, 8, 0);
+
+//    cv::imshow("aa", m_mDistortionImage_);
+//    cv::waitKey(0);
+    return id;
+}
+
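+// pixToCamera / cameraToPix only shift by the principal point (cx, cy) and flip the y axis;
+// no focal-length scaling is applied, so "camera" coordinates here are pixel offsets from the
+// principal point rather than normalized image coordinates.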
+cv::Point2d MarkExtract::pixToCamera(cv::Point2d &p) {
+
+    cv::Point2d tempP;
+    tempP.x = p.x - m_mK_.at<double>(0, 2);
+    tempP.y = m_mK_.at<double>(1, 2) - p.y;
+
+    return tempP;
+}
+
+cv::Point2d MarkExtract::cameraToPix(cv::Point2d &p) {
+    cv::Point2d tempP;
+    tempP.x = p.x + m_mK_.at<double>(0, 2);
+    tempP.y = m_mK_.at<double>(1, 2) - p.y;
+
+    return tempP;
+}
+
+int MarkExtract::get_char() {
+    fd_set rfds;
+    struct timeval tv;
+    int ch = 0;
+
+    FD_ZERO(&rfds);
+    FD_SET(0, &rfds);
+    tv.tv_sec = 0;
+    tv.tv_usec = 10; // select() wait timeout
+
+    // check whether keyboard input is pending on stdin
+    if (select(1, &rfds, NULL, NULL, &tv) > 0){
+        ch = getchar();
+    }
+    return ch;
+}
+
+cv::Point2d MarkExtract::getRecentPoint(cv::Point2d A, cv::Point2d B, cv::Point2d p, std::vector<cv::Point2d> &mc) {
+    double minDis{1000};
+    cv::Point2d recentPoint;
+    for(const auto& comparePoint : mc){
+//        auto dis = std::hypot(comparePoint.x - p.x, comparePoint.y - p.y);
+        auto dis = distanceTwoPoint(comparePoint,p);
+        if(dis < minDis){
+            minDis = dis;
+            recentPoint = comparePoint;
+        }
+    }
+    auto recentDis = distanceTwoPoint(recentPoint,p);
+
+    if(recentDis > distanceTwoPoint(A,B) / 10){
+        recentPoint.x = -100000;  // sentinel value: no valid point found near the expected location
+    }
+    return recentPoint;
+}
+
+std::vector<cv::Point2d> MarkExtract::removeOutlier(std::vector<cv::Point2d> &mc, double radius, int k) {
+    std::vector<cv::Point2d> points;
+    for (int i = 0; i < mc.size(); ++i) {
+        cv::Point2d p1 = mc[i];
+        int count{0};
+        for (int j = 0; j < mc.size(); ++j) {
+            if(i != j){
+                auto p2 = mc[j];
+                auto dis = distanceTwoPoint(p1,p2);
+                if(dis < radius){
+                    count +=1;
+                }
+            }
+        }
+        if(count > k){
+            points.push_back(p1);
+        }
+    }
+    return points;
+}
+
+
+std::map<int,std::vector<cv::Point2d>> MarkExtract::DBSCAN(std::vector<cv::Point2d> dataset,float Eps,int MinPts){
+    int len = dataset.size();
+    std::map<int,std::vector<cv::Point2d>> allPoints;
+    allPoints.clear();
+    std::vector<Point> data;
+    for (int i = 0; i < len; ++i) {
+        data.emplace_back(dataset[i],i);
+    }
+
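+    // Plain DBSCAN over 2-D pixel coordinates:
+    //   1) count each point's neighbors within Eps,
+    //   2) mark points with at least MinPts neighbors as core points,
+    //   3) merge core points within Eps of each other into clusters (depth-first search),
+    //   4) attach border points to a nearby core cluster.
+    // Only core points are returned, grouped by cluster id (cluster ids come from the original
+    // point indices).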
+    //calculate pts
+    ROS_INFO("DBSCAN: counting neighbors");
+    for(int i=0;i<len;i++){
+        for(int j=i+1;j<len;j++){
+            if(distanceTwoPoint(data[i].m_mPoint,data[j].m_mPoint)<Eps){
+                data[i].m_nPts++;
+                data[j].m_nPts++;
+            }
+        }
+    }
+    //core point
+    std::cout<<"core point "<<std::endl;
+    std::vector<Point> corePoint;
+    for(int i=0;i<len;i++){
+        if(data[i].m_nPts>=MinPts) {
+            data[i].m_nPointType = 3;
+            corePoint.push_back(data[i]);
+        }
+    }
+    ROS_INFO("DBSCAN: core points added");
+    //joint core point
+    for(int i=0;i<corePoint.size();i++){
+        for(int j=i+1;j<corePoint.size();j++){
+            if(distanceTwoPoint(corePoint[i].m_mPoint,corePoint[j].m_mPoint)<Eps){
+                corePoint[i].corepts.push_back(j);
+                corePoint[j].corepts.push_back(i);
+            }
+        }
+    }
+    for(int i=0;i<corePoint.size();i++){
+        std::stack<Point*> ps;
+        if(corePoint[i].m_Visited == 1) continue;
+        ps.push(&corePoint[i]);
+        Point *v;
+        while(!ps.empty()){
+            v = ps.top();
+            v->m_Visited = 1;
+            ps.pop();
+            for(int corept : v->corepts){
+                if(corePoint[corept].m_Visited==1) continue;
+                corePoint[corept].m_nCluster = corePoint[i].m_nCluster;
+                corePoint[corept].m_Visited = 1;
+                ps.push(&corePoint[corept]);
+            }
+        }
+    }
+    ROS_INFO("DBSCAN: joining border points to core points");
+    //border point,joint border point to core point
+    for(int i=0;i<len;i++){
+        if(data[i].m_nPointType==3) continue;
+        for(auto & j : corePoint){
+            if(distanceTwoPoint(data[i].m_mPoint,j.m_mPoint)<Eps) {
+                data[i].m_nPointType = 2;
+                data[i].m_nCluster = j.m_nCluster;
+                break;
+            }
+        }
+    }
+    ROS_INFO("DBSCAN: collecting output");
+    //output
+//    fstream clustering;
+//    clustering.open("clustering.txt",ios::out);
+//    for(auto & i : data){
+//        if(i.m_nPointType == 2)
+//            std::cout<<i.m_mPoint.x<<","<<i.m_mPoint.y<<","<<i.m_nCluster<<"\n";
+//    }
+    for(auto & p : corePoint){
+//        std::cout<<p.m_mPoint.x<<","<<p.m_mPoint.y<<","<<p.m_nCluster<<"\n";
+        if (allPoints.count(p.m_nCluster) == 0){
+            std::vector<cv::Point2d> points;
+            points.push_back(p.m_mPoint);
+            allPoints[p.m_nCluster] = points;
+        }else{
+            allPoints[p.m_nCluster].push_back(p.m_mPoint);
+        }
+    }
+    return allPoints;
+}
+
+double MarkExtract::distanceTwoPoint(cv::Point2d p1,cv::Point2d p2) {
+    return std::hypot(p1.x - p2.x ,p1.y - p2.y);
+}

+ 99 - 0
src/mark_extract.h

@@ -0,0 +1,99 @@
+/**
+ * \file mark_extract.h
+ * \brief Declarations for reflective-marker extraction (MarkExtract) and its helper types
+ * \details Point and PointDisSet support the DBSCAN clustering and marker-corner search used by MarkExtract
+ * \author lenotary
+ * \version 1.0
+ * \date 2021/11/24
+ * \pre Preconditions. (Add only if applicable.)
+ * \bug Known bugs. (Add only if applicable; "none found yet" is also acceptable.)
+ * \warning Warnings. (Add only if applicable.)
+ * \copyright License notice. (Usually only the license name; the full text belongs in the LICENSE file and must be consistent with it.)
+ * \since Historical notes. (Add only if applicable.)
+ */
+
+
+//
+// Created by lenotary on 2021/11/24.
+//
+
+#ifndef SRC_MARK_EXTRACT_H
+#define SRC_MARK_EXTRACT_H
+#include <ros/ros.h>
+#include <opencv2/opencv.hpp>
+
+#include <cstdio>
+#include <cstdlib>
+#include <stack>
+#include <map>
+#include <utility>
+
+class Point{
+public:
+    cv::Point2d m_mPoint;
+    int m_nCluster=0;
+    int m_nPointType=1;    // 1 noise, 2 border, 3 core
+    int m_nPts=0;          // number of neighbors within Eps
+    std::vector<int> corepts;
+    int m_Visited = 0;
+    Point ()= default;
+    Point (cv::Point2d p,int c):m_mPoint(p),m_nCluster(c){}
+};
+
+class PointDisSet{
+public:
+    PointDisSet(){};
+    PointDisSet(double d,cv::Point2d p1, cv::Point2d p2):mDis(d),mP1(std::move(p1)),mP2(std::move(p2)){}
+    double mDis{};
+    cv::Point2d mP1;
+    cv::Point2d mP2;
+};
+
+class MarkExtract {
+public:
+    MarkExtract(ros::NodeHandle &nh, ros::NodeHandle &nhPrivate);
+    void process(cv::Mat &image);
+    int get_char();
+
+    cv::Mat m_mDistortionImage;
+    cv::Mat m_mBinary;
+    cv::Mat m_mR;
+    cv::Mat m_mT;
+
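+    // Results of the most recent process() call: decoded marker IDs and the corresponding
+    // translation / rotation vectors, stored as parallel vectors.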
+    std::vector<int> m_vIds;
+    std::vector<cv::Mat> m_vTS;
+    std::vector<cv::Mat> m_vRS;
+    int m_nId{-1};
+
+private:
+    ros::NodeHandle m_iNh_;
+    ros::NodeHandle m_iNhPrivate_;
+    cv::Mat m_mNewCameraMatrix_;
+    cv::Mat m_mK_;
+    cv::Mat m_mD_;
+
+    cv::Size m_iSize_;
+    int m_nWidth_;
+    int m_nHeight_;
+    int m_nThresh_;            // binarization threshold
+    double m_dPointSize_;      // diameter of a reflective point
+    double m_dPointDistance_;  // spacing between reflective points
+    double m_dMinPointArea_;   // minimum pixel area of a reflective point
+    double m_dMinPointWidth_;  // minimum pixel width of a reflective point
+
+
+
+    int getIdAndPose(std::vector<cv::Point2d> &mc,cv::Mat &r,cv::Mat &t);
+    void printPoint(cv::Point2d p,std::string name=""){
+        ROS_INFO(" %s:  point [%f , %f]",name.data(),p.x,p.y);
+    }
+    double distanceTwoPoint(cv::Point2d p1,cv::Point2d p2);
+    cv::Point2d getRecentPoint(cv::Point2d A, cv::Point2d B,cv::Point2d p, std::vector<cv::Point2d> &mc);
+    cv::Point2d pixToCamera(cv::Point2d &p);
+    cv::Point2d cameraToPix(cv::Point2d &p);
+    std::vector<cv::Point2d> removeOutlier(std::vector<cv::Point2d> &mc, double radius, int k);
+    std::map<int,std::vector<cv::Point2d>> DBSCAN(std::vector<cv::Point2d> dataset,float Eps,int MinPts);
+};
+
+
+#endif //SRC_MARK_EXTRACT_H

+ 113 - 0
src/node.cpp

@@ -0,0 +1,113 @@
+#include <V4l2Device.h>
+#include <V4l2Capture.h>
+#include "logger.h"
+#include "opencv2/opencv.hpp"
+
+#include "opencv2/highgui/highgui.hpp"
+#include<image_transport/image_transport.h>
+#include<cv_bridge/cv_bridge.h>
+#include <std_msgs/Int16.h>
+#include <std_msgs/Int32.h>
+#include <top_mark/PoseArray.h>
+#include <ros/ros.h>
+#include <tf/transform_datatypes.h>
+#include "mark_extract.h"
+
+int main(int argc, char **argv) {
+    int verbose = 0;
+    setlocale(LC_CTYPE, "zh_CN.utf8");
+
+    const char *in_devname = "/dev/video0";/* V4L2_PIX_FMT_YUYV V4L2_PIX_FMT_MJPEG*/
+    ros::init(argc, argv, "node");
+    ros::NodeHandle nh;
+    ros::NodeHandle nhPrivate("~");
+    MarkExtract markExtract(nh, nhPrivate);
+    image_transport::ImageTransport it(nh);
+    image_transport::Publisher pub = it.advertise("video_image", 1);
+
+    ros::Publisher pubPose = nh.advertise<top_mark::PoseArray>("mark_pose_id", 1);
+
+    sensor_msgs::ImagePtr msg;
+
+//    markExtract.process(image);
+
+    /*
+     * Usage note: when reading from a driver-free UVC camera, avoid using OpenCV's VideoCapture
+     * directly, because its simplified API does not reveal which frame format the camera is
+     * actually delivering. Use the Qt v4l2 test benchmark tool to determine the frame format
+     * that is really needed, e.g. V4L2_PIX_FMT_MJPEG (MJPEG).
+     */
+
+    V4L2DeviceParameters mparam(in_devname, V4L2_PIX_FMT_MJPEG, 1280, 720, 30,
+                                V4L2_EXPOSURE_MANUAL, 0, 100, verbose);
+    V4l2Capture *videoCapture = V4l2Capture::create(mparam, V4l2Access::IOTYPE_MMAP);
+    timeval tv;
+    double start = ros::Time::now().toSec();
+    int count = 0;
+    bool stop{false};
+    cv::Mat v4l2Mat;
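+    // Main capture loop: Enter toggles pause; wait up to 1 s for a readable frame, decode it,
+    // run marker extraction, publish the annotated image and the detected marker poses, and log
+    // the effective frame rate every 100 frames.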
+    while (ros::ok()) {
+        tv.tv_sec = 1;
+        tv.tv_usec = 0;
+        int ret = videoCapture->isReadable(&tv);
+        auto ch = markExtract.get_char();
+        if (ch == 10) {
+            stop = !stop;
+            ROS_INFO("capture paused: %d", stop);
+        }
+        if (ret == 1 and !stop) {
+
+            ret = videoCapture->read(v4l2Mat);
+            if (ret != 0) {
+                ROS_ERROR("open camera error");
+                ros::shutdown();
+            } else {
+
+                top_mark::PoseArray poseArray;
+                markExtract.process(v4l2Mat);
+
+                for (int i = 0; i < markExtract.m_vIds.size(); ++i) {
+
+                    if(markExtract.m_vIds[i] != -1){
+                        geometry_msgs::PoseStamped pose;
+                        pose.header.seq = count;
+                        pose.header.frame_id = "camera";
+                        pose.header.stamp = ros::Time::now();
+                        ROS_INFO("marker id %d", markExtract.m_vIds[i]);
+                        // note: solvePnP returns a Rodrigues rotation vector, which is used
+                        // here as roll/pitch/yaw when building the quaternion
+                        auto q = tf::createQuaternionMsgFromRollPitchYaw(markExtract.m_vRS[i].at<double>(0),
+                                                                         markExtract.m_vRS[i].at<double>(1),
+                                                                         markExtract.m_vRS[i].at<double>(2));
+                        pose.pose.orientation = q;
+                        pose.pose.position.x = markExtract.m_vTS[i].at<double>(0);
+                        pose.pose.position.y = markExtract.m_vTS[i].at<double>(1);
+                        pose.pose.position.z = markExtract.m_vTS[i].at<double>(2);
+
+                        poseArray.poses.push_back(pose);
+                        poseArray.ids.push_back(markExtract.m_vIds[i]);
+
+                    }
+                }
+
+//                msg = cv_bridge::CvImage(std_msgs::Header(), "mono8", markExtract.m_mBinary).toImageMsg();
+                msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", markExtract.m_mDistortionImage).toImageMsg();
+                pub.publish(msg);
+                pubPose.publish(poseArray);
+                if (count > 100) {
+                    auto dt = ros::Time::now().toSec() - start;
+                    ROS_INFO("dt %f %f", dt, 100.0 / dt);
+                    start = ros::Time::now().toSec();
+                    count = 0;
+                }
+
+                count++;
+            }
+        } else if (ret == -1) { // read error
+            ROS_INFO("stop");
+            ros::shutdown();
+
+        } else if (ret == 0) { // timeout
+
+        }
+        ros::Duration(0.01).sleep();
+    }
+}

+ 20 - 0
src/v4l2_cv_mat/V4l2Access.cpp

@@ -0,0 +1,20 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Access.cpp
+**
+** V4L2 wrapper
+**
+** -------------------------------------------------------------------------*/
+
+#include "V4l2Access.h"
+
+V4l2Access::V4l2Access(V4l2Device* device) : m_device(device) {
+}
+
+V4l2Access::~V4l2Access() { 
+	delete m_device; 
+}
+

+ 165 - 0
src/v4l2_cv_mat/V4l2Capture.cpp

@@ -0,0 +1,165 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Capture.cpp
+** 
+** V4L2 wrapper 
+**
+** -------------------------------------------------------------------------*/
+
+
+// libv4l2
+#include <linux/videodev2.h>
+
+#include <V4l2Capture.h>
+
+// project
+#include "logger.h"
+#include "V4l2Capture.h"
+#include "V4l2MmapDevice.h"
+#include "V4l2ReadWriteDevice.h"
+#include "opencv2/opencv.hpp"
+
+// -----------------------------------------
+//    create video capture interface
+// -----------------------------------------
+V4l2Capture* V4l2Capture::create(const V4L2DeviceParameters & param, IoType iotype)
+{
+	V4l2Capture* videoCapture = NULL;
+	V4l2Device* videoDevice = NULL; 
+	int caps = V4L2_CAP_VIDEO_CAPTURE;
+	switch (iotype)
+	{
+		case IOTYPE_MMAP: 
+			videoDevice = new V4l2MmapDevice(param, V4L2_BUF_TYPE_VIDEO_CAPTURE); 
+			caps |= V4L2_CAP_STREAMING;
+		break;
+		case IOTYPE_READWRITE:
+			videoDevice = new V4l2ReadWriteDevice(param, V4L2_BUF_TYPE_VIDEO_CAPTURE); 
+			caps |= V4L2_CAP_READWRITE;
+		break;
+	}
+	
+	if (videoDevice &&  !videoDevice->init(caps))
+	{
+		delete videoDevice;
+		videoDevice=NULL; 
+	}
+	
+	if (videoDevice)
+	{
+		videoCapture = new V4l2Capture(videoDevice);
+	}	
+	return videoCapture;
+}
+
+// -----------------------------------------
+//    constructor
+// -----------------------------------------
+V4l2Capture::V4l2Capture(V4l2Device* device) : V4l2Access(device)
+{
+}
+
+// -----------------------------------------
+//    destructor
+// -----------------------------------------
+V4l2Capture::~V4l2Capture() 
+{
+}
+
+// -----------------------------------------
+//    check readability
+// -----------------------------------------
+int V4l2Capture::isReadable(timeval* tv)
+{
+	int fd = m_device->getFd();
+	fd_set fdset;
+	FD_ZERO(&fdset);	
+	FD_SET(fd, &fdset);
+    return select(fd+1, &fdset, NULL, NULL, tv);
+}
+
+const char *V4l2Capture::getBusInfo()
+{
+    return (const char *)this->m_device->getBusInfo();
+}
+
+// -----------------------------------------
+//    read from V4l2Device
+// -----------------------------------------
+size_t V4l2Capture::read(char* buffer, size_t bufferSize)
+{
+    return m_device->readInternal(buffer, bufferSize);
+}
+
+int V4l2Capture::read(cv::Mat &readImage)
+{
+    // release the previous frame's data to avoid using stale content
+    if(!readImage.empty()){
+        readImage.release();
+    }
+    char buffer[this->getBufferSize()];
+    int rsize = this->read(buffer, sizeof(buffer));
+    if (rsize == -1)
+    {
+        return -1;
+    }
+    else
+    {
+        /*
+        2.6.1. Packed YUV formats
+        2.6.2. V4L2_PIX_FMT_GREY (‘GREY’)
+        2.6.3. V4L2_PIX_FMT_Y10 (‘Y10 ‘)
+        2.6.4. V4L2_PIX_FMT_Y12 (‘Y12 ‘)
+        2.6.5. V4L2_PIX_FMT_Y10BPACK (‘Y10B’)
+        2.6.6. V4L2_PIX_FMT_Y16 (‘Y16 ‘)
+        2.6.7. V4L2_PIX_FMT_Y16_BE (‘Y16 ‘ | (1 << 31))
+        2.6.8. V4L2_PIX_FMT_Y8I (‘Y8I ‘)
+        2.6.9. V4L2_PIX_FMT_Y12I (‘Y12I’)
+        2.6.10. V4L2_PIX_FMT_UV8 (‘UV8’)
+        2.6.11. V4L2_PIX_FMT_YUYV (‘YUYV’)
+        2.6.12. V4L2_PIX_FMT_UYVY (‘UYVY’)
+        2.6.13. V4L2_PIX_FMT_YVYU (‘YVYU’)
+        2.6.14. V4L2_PIX_FMT_VYUY (‘VYUY’)
+        2.6.15. V4L2_PIX_FMT_Y41P (‘Y41P’)
+        2.6.16. V4L2_PIX_FMT_YVU420 (‘YV12’), V4L2_PIX_FMT_YUV420 (‘YU12’)
+        2.6.17. V4L2_PIX_FMT_YUV420M (‘YM12’), V4L2_PIX_FMT_YVU420M (‘YM21’)
+        2.6.18. V4L2_PIX_FMT_YUV422M (‘YM16’), V4L2_PIX_FMT_YVU422M (‘YM61’)
+        2.6.19. V4L2_PIX_FMT_YUV444M (‘YM24’), V4L2_PIX_FMT_YVU444M (‘YM42’)
+        2.6.20. V4L2_PIX_FMT_YVU410 (‘YVU9’), V4L2_PIX_FMT_YUV410 (‘YUV9’)
+        2.6.21. V4L2_PIX_FMT_YUV422P (‘422P’)
+        2.6.22. V4L2_PIX_FMT_YUV411P (‘411P’)
+        2.6.23. V4L2_PIX_FMT_NV12 (‘NV12’), V4L2_PIX_FMT_NV21 (‘NV21’)
+        2.6.24. V4L2_PIX_FMT_NV12M (‘NM12’), V4L2_PIX_FMT_NV21M (‘NM21’), V4L2_PIX_FMT_NV12MT_16X16
+        2.6.25. V4L2_PIX_FMT_NV12MT (‘TM12’)
+        2.6.26. V4L2_PIX_FMT_NV16 (‘NV16’), V4L2_PIX_FMT_NV61 (‘NV61’)
+        2.6.27. V4L2_PIX_FMT_NV16M (‘NM16’), V4L2_PIX_FMT_NV61M (‘NM61’)
+        2.6.28. V4L2_PIX_FMT_NV24 (‘NV24’), V4L2_PIX_FMT_NV42 (‘NV42’)
+        2.6.29. V4L2_PIX_FMT_M420 (‘M420’)
+
+         * */
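+        // Wrap the raw frame in a cv::Mat according to the negotiated pixel format and convert
+        // it to a BGR(A) image; MJPEG frames are decoded with cv::imdecode, H264 is not handled
+        // here.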
+        if(m_device->getFormat() == V4L2_PIX_FMT_YUYV){
+            cv::Mat v4l2Mat = cv::Mat( m_device->getHeight(),m_device->getWidth(), CV_8UC2, (void*)buffer);
+            cv::cvtColor(v4l2Mat,readImage,cv::COLOR_YUV2BGRA_YUYV);
+        }else if(m_device->getFormat() == V4L2_PIX_FMT_MJPEG){
+            cv::Mat v4l2Mat = cv::Mat( 1,m_device->getHeight()*m_device->getWidth(), CV_8UC1, (void*)buffer);
+            readImage = cv::imdecode(v4l2Mat, 1);
+        }else  if(m_device->getFormat() == V4L2_PIX_FMT_H264){
+
+        }else  if(m_device->getFormat() == V4L2_PIX_FMT_NV12){
+            cv::Mat v4l2Mat = cv::Mat (m_device->getHeight()* 3 / 2, m_device->getWidth(), CV_8UC1, (void*)buffer);
+            cv::cvtColor(v4l2Mat,readImage,cv::COLOR_YUV2BGR_NV12);
+        }else if ((m_device->getFormat()  == V4L2_PIX_FMT_BGR24) || (m_device->getFormat() ==  V4L2_PIX_FMT_RGB24)) {
+            readImage = cv::Mat (m_device->getHeight(), m_device->getWidth(), CV_8UC3, (void*)buffer);
+        }else if ((m_device->getFormat()  == V4L2_PIX_FMT_YVU420) || (m_device->getFormat() ==  V4L2_PIX_FMT_YUV420)) {
+            cv::Mat v4l2Mat = cv::Mat (m_device->getHeight()* 3 / 2, m_device->getWidth(), CV_8UC1, (void*)buffer);
+            cv::cvtColor(v4l2Mat,readImage,cv::COLOR_YUV420p2BGR);
+        }
+        return 0;
+    }
+
+}
+
+				

+ 446 - 0
src/v4l2_cv_mat/V4l2Device.cpp

@@ -0,0 +1,446 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Device.cpp
+** 
+** -------------------------------------------------------------------------*/
+
+#include <unistd.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+
+// libv4l2
+#include <linux/videodev2.h>
+
+#include "logger.h"
+
+#include "V4l2Device.h"
+
+std::string fourcc(unsigned int format) {
+    char formatArray[] = {(char) (format & 0xff), (char) ((format >> 8) & 0xff), (char) ((format >> 16) & 0xff),
+                          (char) ((format >> 24) & 0xff), 0};
+    return std::string(formatArray, strlen(formatArray));
+}
+
+// -----------------------------------------
+//    V4L2Device
+// -----------------------------------------
+V4l2Device::V4l2Device(const V4L2DeviceParameters &params, v4l2_buf_type deviceType) : m_params(params), m_fd(-1),
+                                                                                       m_deviceType(deviceType),
+                                                                                       m_bufferSize(0), m_format(0) {
+}
+
+V4l2Device::~V4l2Device() {
+    this->close();
+}
+
+void V4l2Device::close() {
+    if (m_fd != -1)
+        ::close(m_fd);
+
+    m_fd = -1;
+}
+
+// query current format
+void V4l2Device::queryFormat() {
+    struct v4l2_format fmt;
+    memset(&fmt, 0, sizeof(fmt));
+    fmt.type = m_deviceType;
+    if (0 == ioctl(m_fd, VIDIOC_G_FMT, &fmt)) {
+        m_format = fmt.fmt.pix.pixelformat;
+        m_width = fmt.fmt.pix.width;
+        m_height = fmt.fmt.pix.height;
+        m_bufferSize = fmt.fmt.pix.sizeimage;
+
+        LOG(NOTICE) << m_params.m_devName << ":" << fourcc(m_format) << " size:" << m_width << "x" << m_height
+                    << " bufferSize:" << m_bufferSize;
+    }
+}
+
+// intialize the V4L2 connection
+bool V4l2Device::init(unsigned int mandatoryCapabilities) {
+    struct stat sb;
+    if ((stat(m_params.m_devName.c_str(), &sb) == 0) && ((sb.st_mode & S_IFMT) == S_IFCHR)) {
+        if (initdevice(m_params.m_devName.c_str(), mandatoryCapabilities) == -1) {
+            LOG(ERROR) << "Cannot init device:" << m_params.m_devName;
+        }
+    } else {
+        // open a normal file
+        m_fd = open(m_params.m_devName.c_str(), O_WRONLY | O_CREAT | O_TRUNC, S_IRWXU);
+    }
+    return (m_fd != -1);
+}
+
+// intialize the V4L2 device
+int V4l2Device::initdevice(const char *dev_name, unsigned int mandatoryCapabilities) {
+    m_fd = open(dev_name, m_params.m_openFlags);
+    if (m_fd < 0) {
+        LOG(ERROR) << "Cannot open device:" << m_params.m_devName << " " << strerror(errno);
+        this->close();
+        return -1;
+    }
+    /*
+     * Query the available video inputs of the device and their names.
+     */
+    struct v4l2_input input;
+
+    struct v4l2_standard standard;
+
+    memset(&input, 0, sizeof(input));
+
+    // First set/get the current input index. This is only the index; the enumeration ioctl
+    // below provides the detailed information.
+    input.index = m_params.m_inputIndex;
+    if (-1 == ioctl(m_fd, VIDIOC_S_INPUT, &input)) {
+        LOG(ERROR) << "can not set " << m_params.m_inputIndex << " input index";
+        return -1;
+    }
+    if (-1 == ioctl(m_fd, VIDIOC_G_INPUT, &input.index)) {
+
+        LOG(ERROR) << "get VIDIOC_G_INPUT error ";
+        return -1;
+    }
+
+
+    // Call the enumeration ioctl to get detailed information about the input at input.index
+
+    if (-1 == ioctl(m_fd, VIDIOC_ENUMINPUT, &input)) {
+        LOG(ERROR) << "Get VIDIOC_ENUMINPUT error";
+        return -1;
+    }
+    LOG(NOTICE) << "Current input " << input.name << " supports:";
+    memset(&standard, 0, sizeof(standard));
+    standard.index = 0;
+
+    // Enumerate all supported standards. If standard.id shares a bit flag with the current
+    // input's input.std, this input supports that standard; enumerating every standard the
+    // driver supports therefore yields all standards supported by this input.
+
+    while (0 == ioctl(m_fd, VIDIOC_ENUMSTD, &standard)) {
+        LOG(NOTICE) << "Device std " << standard.name;
+
+        if (standard.id & input.std)
+            LOG(NOTICE) << standard.name;
+        standard.index++;
+    }
+
+    /* EINVAL indicates the end of the enumeration, which cannot be empty unless this device falls under the USB exception.*/
+
+    if (errno != EINVAL || standard.index == 0) {
+        LOG(NOTICE) << "Get VIDIOC_ENUMSTD error";
+    }
+
+
+    if (checkCapabilities(m_fd, mandatoryCapabilities) != 0) {
+        this->close();
+        return -1;
+    }
+    if (configureFormat(m_fd) != 0) {
+        this->close();
+        return -1;
+    }
+    if (configureExp(m_fd) != 0) {
+        this->close();
+        return -1;
+    }
+    if (configureParam(m_fd) != 0) {
+        this->close();
+        return -1;
+    }
+
+
+    return m_fd;
+}
+
+static std::string frmtype2s(unsigned type) {
+    static const char *types[] = {
+            "Unknown",
+            "Discrete",
+            "Continuous",
+            "Stepwise"
+    };
+
+    if (type > 3)
+        type = 0;
+    return types[type];
+}
+
+static std::string fract2sec(const struct v4l2_fract &f) {
+    char buf[100];
+
+    sprintf(buf, "%.3f s", (1.0 * f.numerator) / f.denominator);
+    return buf;
+}
+
+static std::string fract2fps(const struct v4l2_fract &f) {
+    char buf[100];
+
+    sprintf(buf, "%.3f fps", (1.0 * f.denominator) / f.numerator);
+    return buf;
+}
+
+static void print_frmival(const struct v4l2_frmivalenum &frmival) {
+//    printf("%s\tInterval: %s ", prefix, frmtype2s(frmival.type).c_str());
+
+    if (frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+        LOG(NOTICE) << "Interval: " << frmtype2s(frmival.type) << " " << fract2sec(frmival.discrete) << " ->"
+                    << fract2fps(frmival.discrete);
+    } else if (frmival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+        LOG(NOTICE) << "Interval: " << frmtype2s(frmival.type) << fract2sec(frmival.stepwise.min) << " - "
+                    << fract2sec(frmival.stepwise.max)
+                    << " with step " << fract2sec(frmival.stepwise.step);
+        LOG(NOTICE) << fract2sec(frmival.stepwise.min) << " - " << fract2sec(frmival.stepwise.max)
+                    << " with step " << fract2sec(frmival.stepwise.step);
+    }
+}
+
+// check needed V4L2 capabilities and get the device's basic information
+int V4l2Device::checkCapabilities(int fd, unsigned int mandatoryCapabilities) {
+    struct v4l2_capability cap;
+    /*
+        struct v4l2_capability
+
+        {
+
+        u8 driver[16];      // driver name
+        u8 card[32];        // device name
+        u8 bus_info[32];    // location of the device in the system
+        u32 version;        // driver version
+        u32 capabilities;   // operations supported by the device
+        u32 reserved[4];    // reserved
+        };
+    */
+    memset(&(cap), 0, sizeof(cap));
+    if (-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap)) {
+        LOG(ERROR) << "Cannot get capabilities for device:" << m_params.m_devName << " " << strerror(errno);
+        return -1;
+    }
+    LOG(NOTICE) << "driver:" << cap.driver << " capabilities:" << std::hex << cap.capabilities << " mandatory:"
+                << mandatoryCapabilities << std::dec;
+    LOG(NOTICE) << "card:" << cap.card << " Bus info: " << cap.bus_info;
+    memcpy(bus_info, cap.bus_info, sizeof(cap.bus_info));
+    /// check the device's capabilities
+    if ((cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) LOG(NOTICE) << m_params.m_devName << " support output";
+    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+        LOG(NOTICE) << m_params.m_devName << " support capture";
+        /***
+         *
+         * Query all capture formats supported by this device
+         ***/
+        /*
+            struct v4l2_fmtdesc
+
+            {
+            u32 index;                // index of the format to query, set by the application
+            enum v4l2_buf_type type;  // frame type, set by the application
+            u32 flags;                // whether the format is compressed
+            u8 description[32];       // format name
+            u32 pixelformat;          // fourcc pixel format
+            u32 reserved[4];          // reserved
+            };
+    */
+        struct v4l2_fmtdesc fmtdesc;
+        fmtdesc.index = 0;
+        fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        LOG(NOTICE) << "capture Support format:";
+//        while(ioctl(m_fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)
+//        {
+////          printf("\t%d.%s\n",fmtdesc.index+1,fmtdesc.description);
+//          LOG(NOTICE) <<(fmtdesc.index+1) <<"."<<fmtdesc.description<<",pixelformat:"<< fourcc(fmtdesc.pixelformat) ;
+
+//          fmtdesc.index++;
+//        }
+        { // enumerate the pixel formats and resolutions supported by the camera
+
+            enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+            struct v4l2_fmtdesc fmtdesc;
+            struct v4l2_frmsizeenum frmsize;
+            struct v4l2_frmivalenum frmival;
+
+            fmtdesc.index = 0;
+            fmtdesc.type = type;
+            while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) >= 0) {
+                LOG(NOTICE) << (fmtdesc.index + 1) << "." << fmtdesc.description << ", supported resolutions:";
+                frmsize.pixel_format = fmtdesc.pixelformat;
+                frmsize.index = 0;
+                while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) >= 0) {
+
+                    if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+                        LOG(NOTICE) << "width:" << frmsize.discrete.width << ",height:" << frmsize.discrete.height;
+                        frmival.index = 0;
+                        frmival.pixel_format = fmtdesc.pixelformat;
+                        frmival.width = frmsize.discrete.width;
+                        frmival.height = frmsize.discrete.height;
+                        while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) >= 0) {
+                            print_frmival(frmival);
+                            frmival.index++;
+                        }
+                    } else if (frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+                        LOG(NOTICE) << "width:" << frmsize.stepwise.min_width << "-" << frmsize.stepwise.max_width
+                                    << ",height:" << frmsize.stepwise.min_height << "-" << frmsize.stepwise.max_height;
+                    }
+
+                    frmsize.index++;
+                }
+
+                fmtdesc.index++;
+            }
+
+        }
+
+
+    }
+
+    if ((cap.capabilities & V4L2_CAP_READWRITE)) LOG(NOTICE) << m_params.m_devName << " support read/write";
+    if ((cap.capabilities & V4L2_CAP_STREAMING)) LOG(NOTICE) << m_params.m_devName << " support streaming";
+
+    if ((cap.capabilities & V4L2_CAP_TIMEPERFRAME)) LOG(NOTICE) << m_params.m_devName << " support timeperframe";
+
+    if ((cap.capabilities & mandatoryCapabilities) != mandatoryCapabilities) {
+        LOG(ERROR) << "Mandatory capability not available for device:" << m_params.m_devName;
+        return -1;
+    }
+
+    return 0;
+}
+
+// configure capture format 
+int V4l2Device::configureFormat(int fd) {
+    // get current configuration
+    this->queryFormat();
+
+    unsigned int width = m_width;
+    unsigned int height = m_height;
+    if (m_params.m_width != 0) {
+        width = m_params.m_width;
+    }
+    if (m_params.m_height != 0) {
+        height = m_params.m_height;
+    }
+    if ((m_params.m_formatList.size() == 0) && (m_format != 0)) {
+        m_params.m_formatList.push_back(m_format);
+    }
+
+    // try to set format, width, height
+    std::list<unsigned int>::iterator it;
+    for (it = m_params.m_formatList.begin(); it != m_params.m_formatList.end(); ++it) {
+        unsigned int format = *it;
+        if (this->configureFormat(fd, format, width, height) == 0) {
+            // format has been set
+            // get the format again, because calling S_FMT can return a bad buffer size with v4l2loopback
+            this->queryFormat();
+            return 0;
+        }
+    }
+    return -1;
+}
+
+// configure capture format 
+int V4l2Device::configureFormat(int fd, unsigned int format, unsigned int width, unsigned int height) {
+    struct v4l2_format fmt;
+    memset(&(fmt), 0, sizeof(fmt));
+    fmt.type = m_deviceType;
+    fmt.fmt.pix.width = width;
+    fmt.fmt.pix.height = height;
+    fmt.fmt.pix.pixelformat = format;
+    fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
+        LOG(ERROR) << "Cannot set format:" << fourcc(format) << " for device:" << m_params.m_devName << " "
+                   << strerror(errno);
+        return -1;
+    }
+    if (fmt.fmt.pix.pixelformat != format) {
+        LOG(ERROR) << "Cannot set pixelformat to:" << fourcc(format) << " format is:"
+                   << fourcc(fmt.fmt.pix.pixelformat);
+        return -1;
+    }
+    if ((fmt.fmt.pix.width != width) || (fmt.fmt.pix.height != height)) {
+        LOG(WARN) << "Cannot set size to:" << width << "x" << height << " size is:" << fmt.fmt.pix.width << "x"
+                  << fmt.fmt.pix.height;
+    }
+
+    m_format = fmt.fmt.pix.pixelformat;
+    m_width = fmt.fmt.pix.width;
+    m_height = fmt.fmt.pix.height;
+    m_bufferSize = fmt.fmt.pix.sizeimage;
+
+    LOG(NOTICE) << "Setting " << m_params.m_devName << ":" << fourcc(m_format) << " size:" << m_width << "x" << m_height
+                << " bufferSize:" << m_bufferSize;
+
+    return 0;
+}
+
+// configure capture FPS 
+int V4l2Device::configureParam(int fd) {
+    if (m_params.m_fps != 0) {
+        struct v4l2_streamparm param{};
+        memset(&(param), 0, sizeof(param));
+        param.type = m_deviceType;
+        param.parm.capture.timeperframe.numerator = 1;
+        param.parm.capture.timeperframe.denominator = m_params.m_fps;
+        if (ioctl(fd, VIDIOC_S_PARM, &param) == -1) {
+            LOG(WARN) << "Cannot set param for device:" << m_params.m_devName << " " << strerror(errno);
+        }
+        LOG(NOTICE) << "Setting " << "fps:" << param.parm.capture.timeperframe.numerator << "/"
+                    << param.parm.capture.timeperframe.denominator;
+        LOG(NOTICE) << "Setting " << "nbBuffer:" << param.parm.capture.readbuffers;
+    }
+
+    return 0;
+}
+
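+// Configure exposure: read and set V4L2_CID_EXPOSURE_AUTO (exposure mode), then
+// V4L2_CID_EXPOSURE (exposure level), and finally V4L2_CID_EXPOSURE_ABSOLUTE after a short
+// delay. Failures on the intermediate controls are partially tolerated (checks commented out).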
+int V4l2Device::configureExp(int fd) {
+    if (m_params.m_exp_mode != 0) {
+        struct v4l2_control ctrl{};
+        int ret;
+        memset(&(ctrl), 0, sizeof(ctrl));
+        ctrl.id = V4L2_CID_EXPOSURE_AUTO;
+        ret = ioctl(fd, VIDIOC_G_CTRL, &ctrl);
+        if (ret < 0) {
+            LOG(ERROR) << "Get exposure auto Type failed";
+            return -1;
+        }
+        LOG(NOTICE) << "Get Exposure Auto Type: " << ctrl.value;
+        ctrl.value = m_params.m_exp_mode;   // set exposure mode
+        ret = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
+        if (ret < 0) {
+            LOG(ERROR) << "Set exposure auto type failed";
+            return -1;
+        }
+        ctrl.id = V4L2_CID_EXPOSURE;
+        ret = ioctl(fd, VIDIOC_G_CTRL, &ctrl);
+//        if (ret < 0) {
+//            LOG(ERROR) << "Get exposure failed: " << ret;
+//            return ret;
+//        }
+        LOG(NOTICE) << "Get Exposure : " << ctrl.value;
+        ctrl.value = m_params.m_exp_level;  // set exposure level
+        ret = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
+//        if (ret < 0) {
+//            LOG(ERROR) << "Set exposure failed: " << ret;
+//            return ret;
+//        }
+        ctrl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+        ret = ioctl(fd, VIDIOC_G_CTRL, &ctrl);
+        if (ret < 0) {
+            LOG(ERROR) << "Get exposure abs failed : " << ret;
+            return ret;
+        }
+        LOG(NOTICE) << "Get exposure abs : " << ctrl.value;
+
+        sleep(1);
+        ctrl.value = m_params.m_exp_abs;
+        ret = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
+        if (ret < 0) {
+            LOG(ERROR) << "Set exposure abs failed : " << ret;
+            return ret;
+        }
+
+    }
+    return 0;
+}
+
+

+ 307 - 0
src/v4l2_cv_mat/V4l2MmapDevice.cpp

@@ -0,0 +1,307 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2MmapDevice.cpp
+** 
+** V4L2 source using mmap API
+**
+** -------------------------------------------------------------------------*/
+
+#include <string.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <errno.h> 
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+
+// libv4l2
+#include <linux/videodev2.h>
+
+// project
+#include "logger.h"
+#include "V4l2MmapDevice.h"
+
+V4l2MmapDevice::V4l2MmapDevice(const V4L2DeviceParameters & params, v4l2_buf_type deviceType) : V4l2Device(params, deviceType), n_buffers(0) 
+{
+	memset(&m_buffer, 0, sizeof(m_buffer));
+}
+
+bool V4l2MmapDevice::init(unsigned int mandatoryCapabilities)
+{
+	bool ret = V4l2Device::init(mandatoryCapabilities);
+	if (ret)
+	{
+		ret = this->start();
+	}
+	return ret;
+}
+
+V4l2MmapDevice::~V4l2MmapDevice()
+{
+	this->stop();
+}
+
+
+bool V4l2MmapDevice::start() 
+{
+	LOG(NOTICE) << "Device " << m_params.m_devName;
+
+	bool success = true;
+	struct v4l2_requestbuffers req;
+	memset (&req, 0, sizeof(req));
+	req.count               = V4L2MMAP_NBBUFFER;
+	req.type                = m_deviceType;
+	req.memory              = V4L2_MEMORY_MMAP;
+
+	if (-1 == ioctl(m_fd, VIDIOC_REQBUFS, &req)) 
+	{
+		if (EINVAL == errno) 
+		{
+			LOG(ERROR) << "Device " << m_params.m_devName << " does not support memory mapping";
+			success = false;
+		} 
+		else 
+		{
+			perror("VIDIOC_REQBUFS");
+			success = false;
+		}
+	}
+	else
+	{
+		LOG(NOTICE) << "Device " << m_params.m_devName << " nb buffer:" << req.count;
+		
+		// allocate buffers
+		memset(&m_buffer,0, sizeof(m_buffer));
+		for (n_buffers = 0; n_buffers < req.count; ++n_buffers) 
+		{
+			struct v4l2_buffer buf;
+			memset (&buf, 0, sizeof(buf));
+			buf.type        = m_deviceType;
+			buf.memory      = V4L2_MEMORY_MMAP;
+			buf.index       = n_buffers;
+
+			if (-1 == ioctl(m_fd, VIDIOC_QUERYBUF, &buf))
+			{
+				perror("VIDIOC_QUERYBUF");
+				success = false;
+			}
+			else
+			{
+				LOG(INFO) << "Device " << m_params.m_devName << " buffer idx:" << n_buffers << " size:" << buf.length << " offset:" << buf.m.offset;
+				m_buffer[n_buffers].length = buf.length;
+				if (!m_buffer[n_buffers].length) {
+					m_buffer[n_buffers].length = buf.bytesused;
+				}
+				m_buffer[n_buffers].start = mmap (   NULL /* start anywhere */, 
+											m_buffer[n_buffers].length, 
+											PROT_READ | PROT_WRITE /* required */, 
+											MAP_SHARED /* recommended */, 
+											m_fd, 
+											buf.m.offset);
+
+				if (MAP_FAILED == m_buffer[n_buffers].start)
+				{
+					perror("mmap");
+					success = false;
+				}
+			}
+		}
+
+		// queue buffers
+		for (unsigned int i = 0; i < n_buffers; ++i) 
+		{
+			struct v4l2_buffer buf;
+			memset (&buf, 0, sizeof(buf));
+			buf.type        = m_deviceType;
+			buf.memory      = V4L2_MEMORY_MMAP;
+			buf.index       = i;
+
+			if (-1 == ioctl(m_fd, VIDIOC_QBUF, &buf))
+			{
+				perror("VIDIOC_QBUF");
+				success = false;
+			}
+		}
+
+		// start stream
+		int type = m_deviceType;
+		if (-1 == ioctl(m_fd, VIDIOC_STREAMON, &type))
+		{
+			perror("VIDIOC_STREAMON");
+			success = false;
+		}
+	}
+	return success; 
+}
+
+bool V4l2MmapDevice::stop() 
+{
+	LOG(NOTICE) << "Device " << m_params.m_devName;
+
+	bool success = true;
+	
+	int type = m_deviceType;
+	if (-1 == ioctl(m_fd, VIDIOC_STREAMOFF, &type))
+	{
+		perror("VIDIOC_STREAMOFF");      
+		success = false;
+	}
+
+	for (unsigned int i = 0; i < n_buffers; ++i)
+	{
+		if (-1 == munmap (m_buffer[i].start, m_buffer[i].length))
+		{
+			perror("munmap");
+			success = false;
+		}
+	}
+	
+	// free buffers
+	struct v4l2_requestbuffers req;
+	memset (&req, 0, sizeof(req));
+	req.count               = 0;
+	req.type                = m_deviceType;
+	req.memory              = V4L2_MEMORY_MMAP;
+	if (-1 == ioctl(m_fd, VIDIOC_REQBUFS, &req)) 
+	{
+		perror("VIDIOC_REQBUFS");
+		success = false;
+	}
+	
+	n_buffers = 0;
+	return success; 
+}
+
+size_t V4l2MmapDevice::readInternal(char* buffer, size_t bufferSize)
+{
+	size_t size = 0;
+	if (n_buffers > 0)
+	{
+		struct v4l2_buffer buf;	
+		memset (&buf, 0, sizeof(buf));
+		buf.type = m_deviceType;
+		buf.memory = V4L2_MEMORY_MMAP;
+
+		if (-1 == ioctl(m_fd, VIDIOC_DQBUF, &buf)) 
+		{
+			perror("VIDIOC_DQBUF");
+			size = -1;
+		}
+		else if (buf.index < n_buffers)
+		{
+			size = buf.bytesused;
+			if (size > bufferSize)
+			{
+				size = bufferSize;
+				LOG(WARN) << "Device " << m_params.m_devName << " buffer truncated available:" << bufferSize << " needed:" << buf.bytesused;
+			}
+			memcpy(buffer, m_buffer[buf.index].start, size);
+
+			if (-1 == ioctl(m_fd, VIDIOC_QBUF, &buf))
+			{
+				perror("VIDIOC_QBUF");
+				size = -1;
+			}
+		}
+	}
+	return size;
+}
+
+size_t V4l2MmapDevice::writeInternal(char* buffer, size_t bufferSize)
+{
+	size_t size = 0;
+	if (n_buffers > 0)
+	{
+		struct v4l2_buffer buf;	
+		memset (&buf, 0, sizeof(buf));
+		buf.type = m_deviceType;
+		buf.memory = V4L2_MEMORY_MMAP;
+
+		if (-1 == ioctl(m_fd, VIDIOC_DQBUF, &buf)) 
+		{
+			perror("VIDIOC_DQBUF");
+			size = -1;
+		}
+		else if (buf.index < n_buffers)
+		{
+			size = bufferSize;
+			if (size > buf.length)
+			{
+				LOG(WARN) << "Device " << m_params.m_devName << " buffer truncated available:" << buf.length << " needed:" << size;
+				size = buf.length;
+			}
+			memcpy(m_buffer[buf.index].start, buffer, size);
+			buf.bytesused = size;
+
+			if (-1 == ioctl(m_fd, VIDIOC_QBUF, &buf))
+			{
+				perror("VIDIOC_QBUF");
+				size = -1;
+			}
+		}
+	}
+	return size;
+}
+
+bool V4l2MmapDevice::startPartialWrite(void)
+{
+	if (n_buffers <= 0)
+		return false;
+	if (m_partialWriteInProgress)
+		return false;
+	memset(&m_partialWriteBuf, 0, sizeof(m_partialWriteBuf));
+	m_partialWriteBuf.type = m_deviceType;
+	m_partialWriteBuf.memory = V4L2_MEMORY_MMAP;
+	if (-1 == ioctl(m_fd, VIDIOC_DQBUF, &m_partialWriteBuf))
+	{
+		perror("VIDIOC_DQBUF");
+		return false;
+	}
+	m_partialWriteBuf.bytesused = 0;
+	m_partialWriteInProgress = true;
+	return true;
+}
+
+size_t V4l2MmapDevice::writePartialInternal(char* buffer, size_t bufferSize)
+{
+	size_t new_size = 0;
+	size_t size = 0;
+	if ((n_buffers > 0) && m_partialWriteInProgress)
+	{
+		if (m_partialWriteBuf.index < n_buffers)
+		{
+			new_size = m_partialWriteBuf.bytesused + bufferSize;
+			if (new_size > m_partialWriteBuf.length)
+			{
+				LOG(WARN) << "Device " << m_params.m_devName << " buffer truncated available:" << m_partialWriteBuf.length << " needed:" << new_size;
+				new_size = m_partialWriteBuf.length;
+			}
+			size = new_size - m_partialWriteBuf.bytesused;
+			memcpy(&((char *)m_buffer[m_partialWriteBuf.index].start)[m_partialWriteBuf.bytesused], buffer, size);
+
+			m_partialWriteBuf.bytesused += size;
+		}
+	}
+	return size;
+}
+
+bool V4l2MmapDevice::endPartialWrite(void)
+{
+	if (!m_partialWriteInProgress)
+		return false;
+	if (n_buffers <= 0)
+	{
+		m_partialWriteInProgress = false; // abort partial write
+		return true;
+	}
+	if (-1 == ioctl(m_fd, VIDIOC_QBUF, &m_partialWriteBuf))
+	{
+		perror("VIDIOC_QBUF");
+		m_partialWriteInProgress = false; // abort partial write
+		return true;
+	}
+	m_partialWriteInProgress = false;
+	return true;
+}

+ 106 - 0
src/v4l2_cv_mat/V4l2Output.cpp

@@ -0,0 +1,106 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2Output.cpp
+** 
+** V4L2 wrapper 
+**
+** -------------------------------------------------------------------------*/
+
+#include <string.h>
+
+// libv4l2
+#include <linux/videodev2.h>
+
+// project
+#include "logger.h"
+
+#include "V4l2Output.h"
+#include "V4l2MmapDevice.h"
+#include "V4l2ReadWriteDevice.h"
+
+// -----------------------------------------
+//    create video output interface
+// -----------------------------------------
+V4l2Output* V4l2Output::create(const V4L2DeviceParameters & param, IoType iotype)
+{
+	V4l2Output* videoOutput = NULL;
+	V4l2Device* videoDevice = NULL; 
+	int caps = V4L2_CAP_VIDEO_OUTPUT;
+	switch (iotype)
+	{
+		case IOTYPE_MMAP: 
+			videoDevice = new V4l2MmapDevice(param, V4L2_BUF_TYPE_VIDEO_OUTPUT); 
+			caps |= V4L2_CAP_STREAMING;
+		break;
+		case IOTYPE_READWRITE:
+			videoDevice = new V4l2ReadWriteDevice(param, V4L2_BUF_TYPE_VIDEO_OUTPUT); 
+			caps |= V4L2_CAP_READWRITE;
+		break;
+	}
+	
+	if (videoDevice &&  !videoDevice->init(caps))
+	{
+		delete videoDevice;
+		videoDevice=NULL; 
+	}
+	
+	if (videoDevice)
+	{
+		videoOutput = new V4l2Output(videoDevice);
+	}	
+	return videoOutput;
+}
+
+// -----------------------------------------
+//    constructor
+// -----------------------------------------
+V4l2Output::V4l2Output(V4l2Device* device) : V4l2Access(device)
+{
+}
+
+// -----------------------------------------
+//    destructor
+// -----------------------------------------
+V4l2Output::~V4l2Output() 
+{
+}
+
+// -----------------------------------------
+//    check writability
+// -----------------------------------------
+int V4l2Output::isWritable(timeval* tv)
+{
+	int fd = m_device->getFd();
+	fd_set fdset;
+	FD_ZERO(&fdset);	
+	FD_SET(fd, &fdset);
+	return select(fd+1, NULL, &fdset, NULL, tv);
+}
+
+// -----------------------------------------
+//    write to V4l2Device
+// -----------------------------------------
+size_t V4l2Output::write(char* buffer, size_t bufferSize)
+{
+	return m_device->writeInternal(buffer, bufferSize);
+}
+
+
+bool V4l2Output::startPartialWrite(void)
+{
+	return m_device->startPartialWrite();
+}
+
+size_t V4l2Output::writePartial(char* buffer, size_t bufferSize)
+{
+	return m_device->writePartialInternal(buffer, bufferSize);
+}
+
+bool V4l2Output::endPartialWrite(void)
+{
+	return m_device->endPartialWrite();
+}
+

+ 29 - 0
src/v4l2_cv_mat/V4l2ReadWriteDevice.cpp

@@ -0,0 +1,29 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** V4l2ReadWriteDevice.cpp
+** 
+** V4L2 source using read/write API
+**
+** -------------------------------------------------------------------------*/
+
+#include <unistd.h>
+
+#include "V4l2ReadWriteDevice.h"
+
+V4l2ReadWriteDevice::V4l2ReadWriteDevice(const V4L2DeviceParameters&  params, v4l2_buf_type deviceType) : V4l2Device(params, deviceType) {
+}
+
+
+size_t V4l2ReadWriteDevice::writeInternal(char* buffer, size_t bufferSize) { 
+	return ::write(m_fd, buffer,  bufferSize); 
+}
+
+size_t V4l2ReadWriteDevice::readInternal(char* buffer, size_t bufferSize)  { 
+	return ::read(m_fd, buffer,  bufferSize); 
+}
+		
+	
+

+ 14 - 0
src/v4l2_cv_mat/logger.cpp

@@ -0,0 +1,14 @@
+/* ---------------------------------------------------------------------------
+** This software is in the public domain, furnished "as is", without technical
+** support, and with no warranty, express or implied, as to its usefulness for
+** any purpose.
+**
+** logger.cpp
+** 
+** -------------------------------------------------------------------------*/
+
+#include "logger.h"
+
+#ifndef HAVE_LOG4CPP
+int LogLevel=NOTICE;
+#endif