1. Add the package source for the SDK
sudo apt-key adv --keyserver keys.gnupg.net --recv-key C8B3A55A6F3EFCDE || sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-key C8B3A55A6F3EFCDE
sudo add-apt-repository "deb http://realsense-hw-public.s3.amazonaws.com/Debian/apt-repo xenial main" -u
2. Install the SDK packages (kernel modules, utilities, development headers, and debug symbols)
sudo apt-get install librealsense2-dkms
sudo apt-get install librealsense2-utils
sudo apt-get install librealsense2-dev
sudo apt-get install librealsense2-dbg
3. Update the system packages
sudo apt-get update
sudo apt-get upgrade
4. Verify the installation
realsense-viewer
If no image is displayed, update the camera firmware as follows (see the reference tutorial and the firmware download page):
# List the USB devices to find the camera's bus and device numbers
lsusb
# Upgrade the firmware (adjust the bus/device numbers and the firmware image path to match your setup)
intel-realsense-dfu -b 002 -d 002 -f -i /home/intel/Downloads/Signed_Image_UVC_5_10_6_0.bin
Unplug and replug the camera, then test again:
realsense-viewer
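librealsense2-utils also provides a command-line tool that lists the detected device together with its firmware version, which is a quick way to confirm that the upgrade took effect:
rs-enumerate-devices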
1. Create a catkin workspace
mkdir -p ~/sensor_ws/src
cd ~/sensor_ws/src/
2. Clone the realsense-ros sources
git clone https://github.com/IntelRealSense/realsense-ros.git
git clone https://github.com/pal-robotics/ddynamic_reconfigure.git
3. Build the packages
cd ~/sensor_ws && catkin_make
echo "source ~/sensor_ws/devel/setup.bash" >> ~/.bashrc
source ~/.bashrc
4. Test
(1) Launch the RGB camera:
roslaunch realsense2_camera rs_camera.launch
(2) Launch with the point cloud filter enabled:
roslaunch realsense2_camera rs_camera.launch filters:=pointcloud
(3) Launch with depth aligned to the color stream:
roslaunch realsense2_camera rs_camera.launch align_depth:=true
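With the camera node running, the published topics can be checked from a second terminal to confirm that the streams are up (topic names below assume the default camera namespace):
rostopic list | grep camera
rostopic hz /camera/color/image_raw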
Install the ViSP ROS package, which is required by Method 1 below:
sudo apt-get install ros-kinetic-visp
##### Method 1: QR code pose recognition based on ViSP visual servoing
1. Project: vision_visp
Create the camera launch file:
# Go into the realsense2_camera/launch folder
roscd realsense2_camera && cd launch
touch rs_camera_qr.launch
Add the following content:
<launch>
  <arg name="serial_no" default=""/>
  <arg name="json_file_path" default=""/>
  <arg name="camera" default="camera"/>
  <arg name="tf_prefix" default="$(arg camera)"/>
  <arg name="fisheye_width" default="640"/>
  <arg name="fisheye_height" default="480"/>
  <arg name="enable_fisheye" default="true"/>
  <arg name="depth_width" default="640"/>
  <arg name="depth_height" default="480"/>
  <arg name="enable_depth" default="true"/>
  <arg name="infra_width" default="640"/>
  <arg name="infra_height" default="480"/>
  <arg name="enable_infra1" default="true"/>
  <arg name="enable_infra2" default="true"/>
  <arg name="color_width" default="640"/>
  <arg name="color_height" default="480"/>
  <arg name="enable_color" default="true"/>
  <arg name="fisheye_fps" default="30"/>
  <arg name="depth_fps" default="30"/>
  <arg name="infra_fps" default="30"/>
  <arg name="color_fps" default="30"/>
  <arg name="gyro_fps" default="400"/>
  <arg name="accel_fps" default="250"/>
  <arg name="enable_gyro" default="true"/>
  <arg name="enable_accel" default="true"/>
  <arg name="enable_pointcloud" default="false"/>
  <arg name="pointcloud_texture_stream" default="RS2_STREAM_COLOR"/>
  <arg name="pointcloud_texture_index" default="0"/>
  <arg name="enable_sync" default="false"/>
  <arg name="align_depth" default="false"/>
  <arg name="filters" default=""/>
  <arg name="clip_distance" default="-2"/>
  <arg name="linear_accel_cov" default="0.01"/>
  <arg name="initial_reset" default="false"/>
  <arg name="unite_imu_method" default=""/>
  <arg name="camera/color/camera_info" value="file://$(find realsense2_camera)/calibration/realsense_rgb.yaml" />

  <group ns="$(arg camera)">
    <include file="$(find realsense2_camera)/launch/includes/nodelet.launch.xml">
      <arg name="tf_prefix" value="$(arg tf_prefix)"/>
      <arg name="serial_no" value="$(arg serial_no)"/>
      <arg name="json_file_path" value="$(arg json_file_path)"/>
      <arg name="enable_pointcloud" value="$(arg enable_pointcloud)"/>
      <arg name="pointcloud_texture_stream" value="$(arg pointcloud_texture_stream)"/>
      <arg name="pointcloud_texture_index" value="$(arg pointcloud_texture_index)"/>
      <arg name="enable_sync" value="$(arg enable_sync)"/>
      <arg name="align_depth" value="$(arg align_depth)"/>
      <arg name="fisheye_width" value="$(arg fisheye_width)"/>
      <arg name="fisheye_height" value="$(arg fisheye_height)"/>
      <arg name="enable_fisheye" value="$(arg enable_fisheye)"/>
      <arg name="depth_width" value="$(arg depth_width)"/>
      <arg name="depth_height" value="$(arg depth_height)"/>
      <arg name="enable_depth" value="$(arg enable_depth)"/>
      <arg name="color_width" value="$(arg color_width)"/>
      <arg name="color_height" value="$(arg color_height)"/>
      <arg name="enable_color" value="$(arg enable_color)"/>
      <arg name="infra_width" value="$(arg infra_width)"/>
      <arg name="infra_height" value="$(arg infra_height)"/>
      <arg name="enable_infra1" value="$(arg enable_infra1)"/>
      <arg name="enable_infra2" value="$(arg enable_infra2)"/>
      <arg name="fisheye_fps" value="$(arg fisheye_fps)"/>
      <arg name="depth_fps" value="$(arg depth_fps)"/>
      <arg name="infra_fps" value="$(arg infra_fps)"/>
      <arg name="color_fps" value="$(arg color_fps)"/>
      <arg name="gyro_fps" value="$(arg gyro_fps)"/>
      <arg name="accel_fps" value="$(arg accel_fps)"/>
      <arg name="enable_gyro" value="$(arg enable_gyro)"/>
      <arg name="enable_accel" value="$(arg enable_accel)"/>
      <arg name="filters" value="$(arg filters)"/>
      <arg name="clip_distance" value="$(arg clip_distance)"/>
      <arg name="linear_accel_cov" value="$(arg linear_accel_cov)"/>
      <arg name="initial_reset" value="$(arg initial_reset)"/>
      <arg name="unite_imu_method" value="$(arg unite_imu_method)"/>
    </include>
  </group>
</launch>
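The new launch file is then started in the same way as rs_camera.launch:
roslaunch realsense2_camera rs_camera_qr.launch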
2. Download the source code and build it
cd ~/sensor_ws/src
svn co https://github.com/GJXS1980/Graduation_Project/trunk/vision_visp
cd .. && catkin_make
3. Modify the configuration files for the actual QR code
(1) The camera intrinsics file is visp_auto_tracker/models_door/calibration.ini; the live values can be read from the camera's camera_info topic (see the commands after this list).
For example:
# Camera intrinsics
[image]
width
640
height
480
[/camera/image_raw]
camera matrix
605.792874 0.00000 326.930239
0.00000 608.262289 234.341994
0.00000 0.00000 1.00000
distortion
0.132415 -0.362119 0.007382 0.004568 0.00000
rectification
1.00000 0.00000 0.00000
0.00000 1.00000 0.00000
0.00000 0.00000 1.00000
projection
611.422729 0.00000 329.619446 0.00000
0.00000 616.940002 236.635494 0.00000
0.00000 0.00000 1.00000 0.00000
(2) Set the four feature points
The configuration file is visp_auto_tracker/models_door/pattern.cfg
(3) Set the feature points in the 3D coordinate frame
The configuration files are visp_auto_tracker/models_door/pattern.cao
and visp_auto_tracker/models_door/pattern.wrl
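The live intrinsics for calibration.ini can be read directly from the camera driver, and once the tracker is running its pose estimate can be echoed as well; the output topic below follows the upstream visp_auto_tracker naming and may differ in this fork:
# K matrix and distortion coefficients published by the camera driver
rostopic echo -n 1 /camera/color/camera_info
# Estimated pose of the tracked pattern
rostopic echo /visp_auto_tracker/object_position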
##### Method 2: QR code pose recognition based on ArUco (aruco_ros)
1. Project: aruco_ros
Download the source code and build it:
cd ~/sensor_ws/src
svn co https://github.com/GJXS1980/Graduation_Project/trunk/aruco_ros
cd .. && catkin_make
2. Modify the ArUco marker configuration
(1) Generate an ArUco marker online (ArUco markers generator)
(2) Marker size: markerSize is the physical side length of the ArUco marker (in meters)
(3) Marker ID: markerId is the ID of the ArUco marker
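Assuming the package keeps the upstream aruco_ros single.launch interface, both values can also be overridden on the command line; the numbers below are placeholders for the actual marker:
roslaunch aruco_ros single.launch markerId:=582 markerSize:=0.05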
1. Project: easy_handeye (hand-eye calibration)
Download the source code and build it:
cd ~/sensor_ws/src
svn co https://github.com/GJXS1980/Graduation_Project/trunk/easy_handeye
cd .. && catkin_make
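A typical calibration run combines the camera, the marker tracker, and the robot. The following is a minimal sketch, assuming the upstream easy_handeye calibrate.launch interface; all frame names are placeholders for the actual robot/camera setup:
roslaunch easy_handeye calibrate.launch \
    eye_on_hand:=true \
    tracking_base_frame:=camera_color_optical_frame \
    tracking_marker_frame:=aruco_marker_frame \
    robot_base_frame:=base_link \
    robot_effector_frame:=ee_link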
1. Project: ar_track_alvar (AR tag tracking)
svn co https://github.com/GJXS1980/Graduation_Project/trunk/ar_track_alvar
2. Launch the marker recognition:
roslaunch ar_track_alvar orbbec_ar_track.launch
3. Open rviz and add the TF and Marker displays
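The detected tags are published by ar_track_alvar on the ar_pose_marker topic, which can also be checked directly (the launch file may place it under a namespace):
rostopic echo /ar_pose_marker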
1. Project: zbar_opencv (QR code decoding with ZBar)
svn co https://github.com/GJXS1980/Graduation_Project/trunk/zbar_opencv
2. Start QR code recognition:
roslaunch zbar_opencv zbar_opencv_orbbec.launch
3. Subscribe to the topic with the recognition result:
rostopic echo /qr_result