in tensorflow_lite_support/cc/task/vision/utils/libyuv_frame_buffer_utils.cc [133:219]
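// Converts the NV21 input `buffer` into the format of `output_buffer`,
// writing the result into the output buffer's preallocated planes.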
absl::Status ConvertFromNv21(const FrameBuffer& buffer,
                             FrameBuffer* output_buffer) {
  ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
                   FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
  switch (output_buffer->format()) {
    case FrameBuffer::Format::kRGB: {
      // The RAW format of libyuv represents the 8-bit interleaved RGB format
      // in the big endian style with R being the first byte in memory.
      int ret = libyuv::NV21ToRAW(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.v_buffer,
          yuv_data.uv_row_stride,
          const_cast<uint8*>(output_buffer->plane(0).buffer),
          output_buffer->plane(0).stride.row_stride_bytes,
          buffer.dimension().width, buffer.dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv NV21ToRAW operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kRGBA: {
      // The libyuv ABGR format is an interleaved RGBA format in memory.
      int ret = libyuv::NV21ToABGR(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.v_buffer,
          yuv_data.uv_row_stride,
          const_cast<uint8*>(output_buffer->plane(0).buffer),
          output_buffer->plane(0).stride.row_stride_bytes,
          buffer.dimension().width, buffer.dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv NV21ToABGR operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kYV12:
    case FrameBuffer::Format::kYV21: {
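      // NV21ToI420 deinterleaves the NV21 VU plane into separate planar U and
      // V planes. GetYuvDataFromFrameBuffer has already resolved which output
      // plane is U and which is V for the YV12 vs. YV21 layout.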
      ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
                       FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
      int ret = libyuv::NV21ToI420(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.v_buffer,
          yuv_data.uv_row_stride, const_cast<uint8_t*>(output_data.y_buffer),
          output_data.y_row_stride, const_cast<uint8_t*>(output_data.u_buffer),
          output_data.uv_row_stride, const_cast<uint8_t*>(output_data.v_buffer),
          output_data.uv_row_stride, output_buffer->dimension().width,
          output_buffer->dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv NV21ToI420 operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kNV12: {
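      // NV21 and NV12 share the same Y plane; only the interleaved chroma
      // bytes differ in order, so the Y plane is copied as-is and the VU
      // plane is swapped into UV order.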
      ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
                       FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
      libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
                        const_cast<uint8*>(output_data.y_buffer),
                        output_data.y_row_stride, buffer.dimension().width,
                        buffer.dimension().height);
      ASSIGN_OR_RETURN(
          const FrameBuffer::Dimension uv_plane_dimension,
          GetUvPlaneDimension(buffer.dimension(), buffer.format()));
      libyuv::SwapUVPlane(yuv_data.v_buffer, yuv_data.uv_row_stride,
                          const_cast<uint8*>(output_data.u_buffer),
                          output_data.uv_row_stride, uv_plane_dimension.width,
                          uv_plane_dimension.height);
      break;
    }
    case FrameBuffer::Format::kGRAY: {
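      // Grayscale output only needs the luminance channel, so the Y plane is
      // copied and the chroma data is dropped.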
      libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
                        const_cast<uint8*>(output_buffer->plane(0).buffer),
                        output_buffer->plane(0).stride.row_stride_bytes,
                        output_buffer->dimension().width,
                        output_buffer->dimension().height);
      break;
    }
    default:
      return CreateStatusWithPayload(
          StatusCode::kInternal,
          absl::StrFormat("Format %i is not supported.",
                          output_buffer->format()),
          TfLiteSupportStatus::kImageProcessingError);
  }
  return absl::OkStatus();
}