in kinesis_video_streamer/src/subscriber_callbacks.cpp [111:149]
void ImageTransportCallback(const KinesisStreamManagerInterface & stream_manager,
                            std::string stream_name, const sensor_msgs::ImageConstPtr & image)
{
  Frame frame;
  frame.trackId = DEFAULT_TRACK_ID;
  frame.size = image->step * image->height;
  /* Overflow check (since 'size', 'step' and 'height' are all 32 bit integers). */
  if (image->step != 0 && frame.size / image->step != image->height) {
    AWS_LOGSTREAM_WARN(
      __func__,
      stream_name.c_str()
        << " Integer overflow detected - image size is too big. Aborting ImageTransportCallback");
    return;
  }
  frame.frameData = reinterpret_cast<PBYTE>((void *)(image->data.data()));
  frame.duration = 0;
  frame.index = image->header.seq;
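  /* Fallback timestamp: current wall-clock time in nanoseconds, divided by
   * DEFAULT_TIME_UNIT_IN_NANOS to express it in the 100ns units used below. */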
  UINT64 generated_timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
                                 std::chrono::system_clock::now().time_since_epoch())
                                 .count() /
                               DEFAULT_TIME_UNIT_IN_NANOS;
  /* Image uses the standard ROS Header type, which carries a (seconds, nanoseconds) timestamp
   * structure. Need to convert to 100ns units. */
  std::chrono::seconds timestamp_in_seconds(image->header.stamp.sec);
  UINT64 image_timestamp =
    (std::chrono::microseconds(timestamp_in_seconds).count() * HUNDREDS_OF_NANOS_IN_A_MICROSECOND) +
    (image->header.stamp.nsec / DEFAULT_TIME_UNIT_IN_NANOS);
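  /* Prefer the timestamp carried by the image message; fall back to the locally generated
   * wall-clock timestamp if the header stamp is unset (zero). */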
  frame.decodingTs = image_timestamp ? image_timestamp : generated_timestamp;
  frame.presentationTs = image_timestamp ? image_timestamp : generated_timestamp;
  frame.flags = FRAME_FLAG_NONE;
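  /* Hand the assembled frame to the stream manager, which forwards it to the named
   * Kinesis Video stream. */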
  KinesisManagerStatus status = stream_manager.PutFrame(stream_name, frame);
  if (KINESIS_MANAGER_STATUS_FAILED(status)) {
    AWS_LOGSTREAM_WARN(__func__, stream_name.c_str() << " PutFrame failed. Error code: " << status);
  } else {
    ROS_DEBUG_THROTTLE(PUTFRAME_LOG_INTERVAL_IN_SECONDS, "%s PutFrame succeeded. Frame index: %u",
                       stream_name.c_str(), frame.index);
  }
}
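
/*
 * Illustrative wiring (a sketch, not part of this file): a callback like this is typically
 * bound to an image_transport subscription, with the stream manager and stream name captured
 * at setup time. Names such as 'node_handle', 'topic_name' and 'queue_size' are placeholders.
 *
 *   image_transport::ImageTransport it(node_handle);
 *   image_transport::Subscriber sub = it.subscribe(
 *     topic_name, queue_size,
 *     [&stream_manager, stream_name](const sensor_msgs::ImageConstPtr & image) {
 *       ImageTransportCallback(stream_manager, stream_name, image);
 *     });
 */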