void K4AROSDevice::framePublisherThread()

in src/k4a_ros_device.cpp [876:1227]


void K4AROSDevice::framePublisherThread()
{
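  // Pace the publisher loop at the configured camera frame rate.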
  ros::Rate loop_rate(params_.fps);

  k4a_wait_result_t wait_result;
  k4a_result_t result;

  CameraInfo rgb_raw_camera_info;
  CameraInfo depth_raw_camera_info;
  CameraInfo rgb_rect_camera_info;
  CameraInfo depth_rect_camera_info;
  CameraInfo ir_raw_camera_info;

  Time capture_time;

  k4a::capture capture;

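  // Prefer a calibration loaded through the camera_info_manager when one is available; otherwise
  // fall back to the factory calibration read from the device or recording. Note the pairing:
  // rectified depth is reprojected into the color camera and shares the RGB intrinsics, while
  // rectified color and raw IR share the depth camera intrinsics.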
  if (ci_mngr_rgb_->isCalibrated())
  {
    rgb_raw_camera_info = depth_rect_camera_info = ci_mngr_rgb_->getCameraInfo();
  }
  else
  {
    calibration_data_.getRgbCameraInfo(rgb_raw_camera_info);
    calibration_data_.getRgbCameraInfo(depth_rect_camera_info);
  }

  if (ci_mngr_ir_->isCalibrated())
  {
    depth_raw_camera_info = rgb_rect_camera_info = ir_raw_camera_info = ci_mngr_ir_->getCameraInfo();
  }
  else
  {
    calibration_data_.getDepthCameraInfo(depth_raw_camera_info);
    calibration_data_.getDepthCameraInfo(rgb_rect_camera_info);
    calibration_data_.getDepthCameraInfo(ir_raw_camera_info);
  }

  // The first frame can take noticeably longer to arrive; wait up to 4 seconds for it.
  const std::chrono::milliseconds firstFrameWaitTime = std::chrono::milliseconds(4 * 1000);
  // After that, fail if we do not receive a capture within 5 frame periods.
  const std::chrono::milliseconds regularFrameWaitTime = std::chrono::milliseconds(1000 * 5 / params_.fps);
  std::chrono::milliseconds waitTime = firstFrameWaitTime;

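  // Main loop: pull a capture from the live device or the playback file, then publish whichever
  // image, camera info, and point cloud topics currently have subscribers.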
  while (running_ && ros::ok() && !ros::isShuttingDown())
  {
    if (k4a_device_)
    {
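      // Live device: block until the next capture arrives or the timeout elapses.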
      if (!k4a_device_.get_capture(&capture, waitTime))
      {
        ROS_FATAL("Failed to poll cameras: node cannot continue.");
        ros::requestShutdown();
        return;
      }
      else
      {
        if (params_.depth_enabled)
        {
          // Update the timestamp offset based on the difference between the system timestamp (i.e., arrival at USB bus)
          // and device timestamp (i.e., hardware clock at exposure start).
          updateTimestampOffset(capture.get_ir_image().get_device_timestamp(),
                                capture.get_ir_image().get_system_timestamp());
        }
        else if (params_.color_enabled)
        {
          updateTimestampOffset(capture.get_color_image().get_device_timestamp(),
                                capture.get_color_image().get_system_timestamp());
        }
      }
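      // After the first successful capture, switch to the shorter steady-state timeout.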
      waitTime = regularFrameWaitTime;
    }
    else if (k4a_playback_handle_)
    {
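      // Playback: read the next capture from the recording. The playback handle is shared with the
      // IMU reader, hence the mutex.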
      std::lock_guard<std::mutex> guard(k4a_playback_handle_mutex_);
      if (!k4a_playback_handle_.get_next_capture(&capture))
      {
        // rewind recording if looping is enabled
        if (params_.recording_loop_enabled)
        {
          k4a_playback_handle_.seek_timestamp(std::chrono::microseconds(0), K4A_PLAYBACK_SEEK_BEGIN);
          k4a_playback_handle_.get_next_capture(&capture);
          imu_stream_end_of_file_ = false;
          last_imu_time_usec_ = 0;
        }
        else
        {
          ROS_INFO("Recording reached end of file: node cannot continue.");
          ros::requestShutdown();
          return;
        }
      }

      last_capture_time_usec_ = getCaptureTimestamp(capture).count();
    }

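    // Allocate fresh messages each iteration; publishing shared pointers lets roscpp avoid copies
    // for intra-process subscribers.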
    CompressedImagePtr rgb_jpeg_frame(new CompressedImage);
    ImagePtr rgb_raw_frame(new Image);
    ImagePtr rgb_rect_frame(new Image);
    ImagePtr depth_raw_frame(new Image);
    ImagePtr depth_rect_frame(new Image);
    ImagePtr ir_raw_frame(new Image);
    PointCloud2Ptr point_cloud(new PointCloud2);

    if (params_.depth_enabled)
    {
      // Only do the computation if we have subscribers.
      // Only create an IR frame when we are using a device or the capture contains an IR image.
      // Recordings may not have synchronized captures; for unsynchronized captures without an IR image, skip it.
      if ((ir_raw_publisher_.getNumSubscribers() > 0 || ir_raw_camerainfo_publisher_.getNumSubscribers() > 0) &&
          (k4a_device_ || capture.get_ir_image() != nullptr))
      {
        // IR images are available in all depth modes
        result = getIrFrame(capture, ir_raw_frame);

        if (result != K4A_RESULT_SUCCEEDED)
        {
          ROS_ERROR_STREAM("Failed to get raw IR frame");
          ros::shutdown();
          return;
        }
        else
        {
          capture_time = timestampToROS(capture.get_ir_image().get_device_timestamp());

          // Re-synchronize the timestamps with the capture timestamp
          ir_raw_camera_info.header.stamp = capture_time;
          ir_raw_frame->header.stamp = capture_time;
          ir_raw_frame->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.depth_camera_frame_;

          ir_raw_publisher_.publish(ir_raw_frame);
          ir_raw_camerainfo_publisher_.publish(ir_raw_camera_info);
        }
      }

      // Depth images are not available in PASSIVE_IR mode
      if (calibration_data_.k4a_calibration_.depth_mode != K4A_DEPTH_MODE_PASSIVE_IR)
      {
        // Only create a depth frame when we are using a device or the capture contains a depth image.
        // Recordings may not have synchronized captures; for unsynchronized captures without a depth image, skip the
        // depth frame.
        if ((depth_raw_publisher_.getNumSubscribers() > 0 || depth_raw_camerainfo_publisher_.getNumSubscribers() > 0) &&
            (k4a_device_ || capture.get_depth_image() != nullptr))
        {
          result = getDepthFrame(capture, depth_raw_frame);

          if (result != K4A_RESULT_SUCCEEDED)
          {
            ROS_ERROR_STREAM("Failed to get raw depth frame");
            ros::shutdown();
            return;
          }
          else
          {
            capture_time = timestampToROS(capture.get_depth_image().get_device_timestamp());

            // Re-synchronize the timestamps with the capture timestamp
            depth_raw_camera_info.header.stamp = capture_time;
            depth_raw_frame->header.stamp = capture_time;
            depth_raw_frame->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.depth_camera_frame_;

            depth_raw_publisher_.publish(depth_raw_frame);
            depth_raw_camerainfo_publisher_.publish(depth_raw_camera_info);
          }
        }

        // We can only rectify the depth into the color co-ordinates if the color camera is enabled!
        // Only create a rect depth frame when we are using a device or the capture contains a depth image.
        // Recordings may not have synchronized captures; for unsynchronized captures without a depth image, skip the
        // rect depth frame.
        if (params_.color_enabled &&
            (depth_rect_publisher_.getNumSubscribers() > 0 ||
             depth_rect_camerainfo_publisher_.getNumSubscribers() > 0) &&
            (k4a_device_ || capture.get_depth_image() != nullptr))
        {
          result = getDepthFrame(capture, depth_rect_frame, true /* rectified */);

          if (result != K4A_RESULT_SUCCEEDED)
          {
            ROS_ERROR_STREAM("Failed to get rectifed depth frame");
            ros::shutdown();
            return;
          }
          else
          {
            capture_time = timestampToROS(capture.get_depth_image().get_device_timestamp());

            depth_rect_frame->header.stamp = capture_time;
            depth_rect_frame->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.rgb_camera_frame_;
            depth_rect_publisher_.publish(depth_rect_frame);

            // Re-synchronize the header timestamps since we cache the camera calibration message
            depth_rect_camera_info.header.stamp = capture_time;
            depth_rect_camerainfo_publisher_.publish(depth_rect_camera_info);
          }
        }

#if defined(K4A_BODY_TRACKING)
        // Publish body markers when body tracking is enabled and a depth image is available
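        // Cap the tracker input queue at three outstanding captures so a slow tracker does not
        // accumulate an unbounded backlog.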
        if (params_.body_tracking_enabled && k4abt_tracker_queue_size_ < 3 &&
            (body_marker_publisher_.getNumSubscribers() > 0 || body_index_map_publisher_.getNumSubscribers() > 0))
        {
          if (!k4abt_tracker_.enqueue_capture(capture))
          {
            ROS_ERROR("Error! Add capture to tracker process queue failed!");
            ros::shutdown();
            return;
          }
          else
          {
            ++k4abt_tracker_queue_size_;
          }
        }
#endif
      }
    }

    if (params_.color_enabled)
    {
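      // Color is published either as a compressed JPEG or as a raw BGRA image, depending on the
      // configured color_format.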
      // Only create an rgb frame when we are using a device or the capture contains a color image.
      // Recordings may not have synchronized captures; for unsynchronized captures without a color image, skip it.
      if (params_.color_format == "jpeg")
      {
        if ((rgb_jpeg_publisher_.getNumSubscribers() > 0 || rgb_raw_camerainfo_publisher_.getNumSubscribers() > 0) &&
            (k4a_device_ || capture.get_color_image() != nullptr))
        {
          result = getJpegRgbFrame(capture, rgb_jpeg_frame);

          if (result != K4A_RESULT_SUCCEEDED)
          {
            ROS_ERROR_STREAM("Failed to get Jpeg frame");
            ros::shutdown();
            return;
          }

          capture_time = timestampToROS(capture.get_color_image().get_device_timestamp());

          rgb_jpeg_frame->header.stamp = capture_time;
          rgb_jpeg_frame->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.rgb_camera_frame_;
          rgb_jpeg_publisher_.publish(rgb_jpeg_frame);

          // Re-synchronize the header timestamps since we cache the camera calibration message
          rgb_raw_camera_info.header.stamp = capture_time;
          rgb_raw_camerainfo_publisher_.publish(rgb_raw_camera_info);
        }
      }
      else if (params_.color_format == "bgra")
      {
        if ((rgb_raw_publisher_.getNumSubscribers() > 0 || rgb_raw_camerainfo_publisher_.getNumSubscribers() > 0) &&
            (k4a_device_ || capture.get_color_image() != nullptr))
        {
          result = getRbgFrame(capture, rgb_raw_frame);

          if (result != K4A_RESULT_SUCCEEDED)
          {
            ROS_ERROR_STREAM("Failed to get RGB frame");
            ros::shutdown();
            return;
          }

          capture_time = timestampToROS(capture.get_color_image().get_device_timestamp());

          rgb_raw_frame->header.stamp = capture_time;
          rgb_raw_frame->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.rgb_camera_frame_;
          rgb_raw_publisher_.publish(rgb_raw_frame);

          // Re-synchronize the header timestamps since we cache the camera calibration message
          rgb_raw_camera_info.header.stamp = capture_time;
          rgb_raw_camerainfo_publisher_.publish(rgb_raw_camera_info);
        }

        // We can only rectify the color into the depth co-ordinates if the depth camera is enabled and processing
        // depth data. Only create an rgb rect frame when we are using a device or the capture is synchronized.
        // Recordings may not have synchronized captures; for unsynchronized captures missing either image, skip the
        // rgb rect frame.
        if (params_.depth_enabled && (calibration_data_.k4a_calibration_.depth_mode != K4A_DEPTH_MODE_PASSIVE_IR) &&
            (rgb_rect_publisher_.getNumSubscribers() > 0 || rgb_rect_camerainfo_publisher_.getNumSubscribers() > 0) &&
            (k4a_device_ || (capture.get_color_image() != nullptr && capture.get_depth_image() != nullptr)))
        {
          result = getRbgFrame(capture, rgb_rect_frame, true /* rectified */);

          if (result != K4A_RESULT_SUCCEEDED)
          {
            ROS_ERROR_STREAM("Failed to get rectifed depth frame");
            ros::shutdown();
            return;
          }

          capture_time = timestampToROS(capture.get_color_image().get_device_timestamp());

          rgb_rect_frame->header.stamp = capture_time;
          rgb_rect_frame->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.depth_camera_frame_;
          rgb_rect_publisher_.publish(rgb_rect_frame);

          // Re-synchronize the header timestamps since we cache the camera calibration message
          rgb_rect_camera_info.header.stamp = capture_time;
          rgb_rect_camerainfo_publisher_.publish(rgb_rect_camera_info);
        }
      }
    }

    // Only create a point cloud when we are using a device or the capture is synchronized.
    // Recordings may not have synchronized captures; for unsynchronized captures, skip the point cloud.
    if (pointcloud_publisher_.getNumSubscribers() > 0 &&
        (k4a_device_ || (capture.get_color_image() != nullptr && capture.get_depth_image() != nullptr)))
    {
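      // Three variants: an RGB cloud in the depth frame, an RGB cloud in the color frame, or a
      // colorless cloud, selected by the rgb_point_cloud and point_cloud_in_depth_frame parameters.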
      if (params_.rgb_point_cloud)
      {
        if (params_.point_cloud_in_depth_frame)
        {
          result = getRgbPointCloudInDepthFrame(capture, point_cloud);
        }
        else
        {
          result = getRgbPointCloudInRgbFrame(capture, point_cloud);
        }

        if (result != K4A_RESULT_SUCCEEDED)
        {
          ROS_ERROR_STREAM("Failed to get RGB Point Cloud");
          ros::shutdown();
          return;
        }
      }
      else if (params_.point_cloud)
      {
        result = getPointCloud(capture, point_cloud);

        if (result != K4A_RESULT_SUCCEEDED)
        {
          ROS_ERROR_STREAM("Failed to get Point Cloud");
          ros::shutdown();
          return;
        }
      }

      if (params_.point_cloud || params_.rgb_point_cloud)
      {
        pointcloud_publisher_.publish(point_cloud);
      }
    }

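    // Warn (throttled to every 10 seconds) if an iteration took longer than one frame period.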
    if (loop_rate.cycleTime() > loop_rate.expectedCycleTime())
    {
      ROS_WARN_STREAM_THROTTLE(10, "Image processing thread is running behind."
                                       << std::endl
                                       << "Expected max loop time: " << loop_rate.expectedCycleTime() << std::endl
                                       << "Actual loop time: " << loop_rate.cycleTime() << std::endl);
    }

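    // Service pending ROS callbacks, then sleep out the remainder of the frame period.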
    ros::spinOnce();
    loop_rate.sleep();
  }
}
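
For reference, below is a minimal sketch of the clock-offset filtering that the updateTimestampOffset() call above is responsible for: smoothing the difference between the device hardware clock (exposure start) and the host clock (arrival at the USB bus). This is an illustrative stand-alone version, not the driver's implementation; the ClockOffsetEstimator name, the exponential-moving-average filter, and the 0.99 smoothing constant are all assumptions.

// Hedged sketch (assumptions noted above): estimate a smoothed device-to-host
// clock offset so device timestamps can be mapped into host time despite
// frame-to-frame USB arrival jitter.
#include <chrono>
#include <cstdint>

class ClockOffsetEstimator
{
public:
  // device_ts: device hardware clock at exposure start.
  // system_ts: host clock sampled when the frame arrived on the USB bus.
  void update(std::chrono::microseconds device_ts, std::chrono::nanoseconds system_ts)
  {
    const std::chrono::nanoseconds observed = system_ts - device_ts;
    if (!initialized_)
    {
      offset_ = observed;  // seed the filter with the first observation
      initialized_ = true;
    }
    else
    {
      // Exponential moving average: heavy weight on the history damps arrival jitter.
      offset_ = std::chrono::nanoseconds(static_cast<int64_t>(
          0.99 * static_cast<double>(offset_.count()) +
          0.01 * static_cast<double>(observed.count())));
    }
  }

  // Map a device timestamp into the host clock domain.
  std::chrono::nanoseconds toHostTime(std::chrono::microseconds device_ts) const
  {
    return device_ts + offset_;
  }

private:
  bool initialized_ = false;
  std::chrono::nanoseconds offset_{0};
};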