I had assumed that, since rviz renders with OpenGL, looking up the real 3D coordinate behind a pixel in the rendered image would be done through an OpenGL API. Reading the code shows that is not the case: rviz only uses OpenGL for rendering, and implements everything else itself. The GUI follows the MVC design pattern, and rviz provides its own classes for perspective projection, coordinate transformation, and everything related. The code for recovering the 3D coordinates of the point-cloud points selected in the image is here:
src/rviz/selection/selection_manager.cpp:

bool SelectionManager::getPatchDepthImage(Ogre::Viewport* viewport,
                                          int x,
                                          int y,
                                          unsigned width,
                                          unsigned height,
                                          std::vector<float>& depth_vector)
{
  unsigned int num_pixels = width * height;
  depth_vector.reserve(num_pixels);

  setDepthTextureSize(width, height);

  M_CollisionObjectToSelectionHandler::iterator handler_it = objects_.begin();
  M_CollisionObjectToSelectionHandler::iterator handler_end = objects_.end();

  for (; handler_it != handler_end; ++handler_it)
  {
    handler_it->second->preRenderPass(0);
  }

  if (render(viewport, depth_render_texture_, x, y, x + width, y + height, depth_pixel_box_, "Depth",
             depth_texture_width_, depth_texture_height_))
  {
    uint8_t* data_ptr = (uint8_t*)depth_pixel_box_.data;

    for (uint32_t pixel = 0; pixel < num_pixels; ++pixel)
    {
      uint8_t a = data_ptr[4 * pixel];
      uint8_t b = data_ptr[4 * pixel + 1];
      uint8_t c = data_ptr[4 * pixel + 2];

      int int_depth = (c << 16) | (b << 8) | a;
      float normalized_depth = ((float)int_depth) / (float)0xffffff;
      depth_vector.push_back(normalized_depth * camera_->getFarClipDistance());
    }
  }
  else
  {
    ROS_WARN("Failed to render depth patch\n");
    return false;
  }

  handler_it = objects_.begin();
  handler_end = objects_.end();

  for (; handler_it != handler_end; ++handler_it)
  {
    handler_it->second->postRenderPass(0);
  }

  return true;
}
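To make the depth packing above concrete: the "Depth" render pass writes each pixel's normalized depth into the low three bytes of an RGBA texel, and getPatchDepthImage() reassembles those bytes into a 24-bit integer before scaling by the far clip distance. Below is a minimal, self-contained sketch of that decoding step; the texel bytes and the far-clip value are made up for illustration only.

#include <cstdint>
#include <cstdio>

// Decode one packed depth texel the way getPatchDepthImage() does:
// three bytes -> 24-bit integer -> normalized [0,1] -> world units (scaled by far clip).
float decodePackedDepth(uint8_t a, uint8_t b, uint8_t c, float far_clip)
{
    int int_depth = (c << 16) | (b << 8) | a;            // low byte first
    float normalized_depth = float(int_depth) / float(0xffffff);
    return normalized_depth * far_clip;                  // depth in world units
}

int main()
{
    // Hypothetical texel bytes and far clip distance, just to show the arithmetic.
    uint8_t a = 0x00, b = 0x80, c = 0x00;                // int_depth = 0x008000 = 32768
    float far_clip = 100.0f;                             // assumed far plane at 100 m
    std::printf("depth = %.4f\n", decodePackedDepth(a, b, c, far_clip));
    // 32768 / 0xffffff is roughly 0.00195, so about 0.195 m with this far plane.
    return 0;
}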
bool SelectionManager::get3DPatch(Ogre::Viewport* viewport,
                                  int x,
                                  int y,
                                  unsigned width,
                                  unsigned height,
                                  bool skip_missing,
                                  std::vector<Ogre::Vector3>& result_points)
{
  boost::recursive_mutex::scoped_lock lock(global_mutex_);
  ROS_DEBUG("SelectionManager.get3DPatch()");

  std::vector<float> depth_vector;

  if (!getPatchDepthImage(viewport, x, y, width, height, depth_vector))
    return false;

  unsigned int pixel_counter = 0;
  Ogre::Matrix4 projection = camera_->getProjectionMatrix();
  float depth;

  for (unsigned y_iter = 0; y_iter < height; ++y_iter)
    for (unsigned x_iter = 0; x_iter < width; ++x_iter)
    {
      depth = depth_vector[pixel_counter];

      // Deal with missing or invalid points
      if ((depth > camera_->getFarClipDistance()) || (depth == 0))
      {
        ++pixel_counter;
        if (!skip_missing)
        {
          result_points.push_back(Ogre::Vector3(NAN, NAN, NAN));
        }
        continue;
      }

      Ogre::Vector3 result_point;
      // We want to shoot rays through the center of pixels, not the corners,
      // so add .5 pixels to the x and y coordinate to get to the center
      // instead of the top left of the pixel.
      Ogre::Real screenx = float(x_iter + .5) / float(width);
      Ogre::Real screeny = float(y_iter + .5) / float(height);
      if (projection[3][3] == 0.0) // If this is a perspective projection
      {
        // get world-space ray from camera & mouse coord
        Ogre::Ray vp_ray = camera_->getCameraToViewportRay(screenx, screeny);

        // transform ray direction back into camera coords
        Ogre::Vector3 dir_cam = camera_->getDerivedOrientation().Inverse() * vp_ray.getDirection();

        // normalize, so dir_cam.z == -depth
        dir_cam = dir_cam / dir_cam.z * depth * -1;

        // compute 3d point from camera origin and direction
        result_point = camera_->getDerivedPosition() + camera_->getDerivedOrientation() * dir_cam;
      }
      else // else this must be an orthographic projection.
      {
        // For orthographic projection, getCameraToViewportRay() does
        // the right thing for us, and the above math does not work.
        Ogre::Ray ray;
        camera_->getCameraToViewportRay(screenx, screeny, ray);

        result_point = ray.getPoint(depth);
      }

      result_points.push_back(result_point);
      ++pixel_counter;
    }

  return !result_points.empty();
}
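The perspective branch is worth restating in isolation. getCameraToViewportRay() returns a world-space ray through the pixel center; get3DPatch() rotates its direction into camera coordinates, rescales it so the z component equals -depth (Ogre cameras look down -z), then transforms it back to world space to get the point at exactly that view-space depth. The following is a minimal sketch of just the rescaling step, using a tiny stand-in struct instead of Ogre::Vector3 and assuming the direction is already expressed in camera coordinates.

#include <cstdio>

// Minimal stand-in for Ogre::Vector3 arithmetic, just to illustrate the math.
struct Vec3 { float x, y, z; };
Vec3 scale(Vec3 v, float s) { return {v.x * s, v.y * s, v.z * s}; }

// Given a ray direction in camera coordinates (camera looks down -z) and a
// view-space depth, rescale the direction so its z component equals -depth;
// the result is the camera-space offset of the hit point.
Vec3 pointAtDepth(Vec3 dir_cam, float depth)
{
    // dir_cam / dir_cam.z * depth * -1, exactly as in get3DPatch()
    return scale(dir_cam, depth * -1.0f / dir_cam.z);
}

int main()
{
    // Hypothetical ray through a pixel right of and above center, hit 5 m away.
    Vec3 dir_cam = {0.2f, -0.1f, -1.0f};
    Vec3 p = pointAtDepth(dir_cam, 5.0f);
    std::printf("camera-space point: (%.2f, %.2f, %.2f)\n", p.x, p.y, p.z);
    // In rviz this offset is then rotated by the camera orientation and added
    // to the camera position to obtain the world-space point.
    return 0;
}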
bool SelectionManager::get3DPoint(Ogre::Viewport* viewport, int x, int y, Ogre::Vector3& result_point)
{
  ROS_DEBUG("SelectionManager.get3DPoint()");

  std::vector<Ogre::Vector3> result_points_temp;
  bool success = get3DPatch(viewport, x, y, 1, 1, true, result_points_temp);
  if (result_points_temp.empty())
  {
    // return result_point unmodified if get point fails.
    return false;
  }
  result_point = result_points_temp[0];

  return success;
}
The world 3D coordinates are computed with a ray-casting approach: the depth read back from the off-screen render pass gives the distance along the viewing ray through each pixel. SelectionManager::get3DPoint() is called from many places in rviz; wherever the UI needs to look up the coordinates of a point under the cursor, it goes through this function.
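As a usage sketch, a custom rviz Tool could resolve the point under the mouse the same way, by asking the SelectionManager for the 3D point at the cursor position. This is a hedged illustration of the call pattern, not a copy of any built-in tool; MyPickTool is hypothetical, and it assumes the standard rviz::Tool interface where context_ is the DisplayContext available to every tool.

#include <rviz/tool.h>
#include <rviz/display_context.h>
#include <rviz/selection/selection_manager.h>
#include <rviz/viewport_mouse_event.h>
#include <ros/console.h>
#include <OgreVector3.h>

// Hypothetical tool that prints the 3D point under the cursor on a left click.
class MyPickTool : public rviz::Tool
{
public:
  void activate() override {}
  void deactivate() override {}

  int processMouseEvent(rviz::ViewportMouseEvent& event) override
  {
    if (event.leftDown())
    {
      Ogre::Vector3 pos;
      // get3DPoint() performs the depth read-back and ray math shown above.
      if (context_->getSelectionManager()->get3DPoint(event.viewport, event.x, event.y, pos))
      {
        ROS_INFO("picked point: %.3f %.3f %.3f", pos.x, pos.y, pos.z);
      }
      return Render | Finished;
    }
    return Render;
  }
};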