absl::Status ConvertFromYv()

in tensorflow_lite_support/cc/task/vision/utils/libyuv_frame_buffer_utils.cc [223:328]


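// Converts the YV12/YV21 `buffer` into the format of `output_buffer` using
// libyuv. Supported output formats are RGB, RGBA, NV12, NV21, GRAY, YV12, and
// YV21. The input and output buffers are expected to share the same dimension.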
absl::Status ConvertFromYv(const FrameBuffer& buffer,
                           FrameBuffer* output_buffer) {
  ASSIGN_OR_RETURN(FrameBuffer::YuvData yuv_data,
                   FrameBuffer::GetYuvDataFromFrameBuffer(buffer));
  switch (output_buffer->format()) {
    case FrameBuffer::Format::kRGB: {
      // The libyuv RAW format is 8-bit interleaved RGB stored big-endian in
      // memory, i.e. with R as the first byte.
      int ret = libyuv::I420ToRAW(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
          yuv_data.uv_row_stride, yuv_data.v_buffer, yuv_data.uv_row_stride,
          const_cast<uint8*>(output_buffer->plane(0).buffer),
          output_buffer->plane(0).stride.row_stride_bytes,
          buffer.dimension().width, buffer.dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv I420ToRAW operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kRGBA: {
      // The libyuv ABGR format is interleaved RGBA in memory, with R as the
      // first byte.
      int ret = libyuv::I420ToABGR(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
          yuv_data.uv_row_stride, yuv_data.v_buffer, yuv_data.uv_row_stride,
          const_cast<uint8*>(output_buffer->plane(0).buffer),
          output_buffer->plane(0).stride.row_stride_bytes,
          buffer.dimension().width, buffer.dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv I420ToABGR operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kNV12: {
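      // NV12 packs the chroma into a single interleaved UV plane with U first,
      // so the output U buffer is passed as the destination UV plane.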
      ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
                       FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
      int ret = libyuv::I420ToNV12(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
          yuv_data.uv_row_stride, yuv_data.v_buffer, yuv_data.uv_row_stride,
          const_cast<uint8*>(output_data.y_buffer), output_data.y_row_stride,
          const_cast<uint8*>(output_data.u_buffer), output_data.uv_row_stride,
          output_buffer->dimension().width, output_buffer->dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv I420ToNV12 operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kNV21: {
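      // NV21 packs the chroma into a single interleaved VU plane with V first,
      // so the output V buffer is passed as the destination VU plane.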
      ASSIGN_OR_RETURN(FrameBuffer::YuvData output_data,
                       FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
      int ret = libyuv::I420ToNV21(
          yuv_data.y_buffer, yuv_data.y_row_stride, yuv_data.u_buffer,
          yuv_data.uv_row_stride, yuv_data.v_buffer, yuv_data.uv_row_stride,
          const_cast<uint8*>(output_data.y_buffer), output_data.y_row_stride,
          const_cast<uint8*>(output_data.v_buffer), output_data.uv_row_stride,
          output_buffer->dimension().width, output_buffer->dimension().height);
      if (ret != 0) {
        return CreateStatusWithPayload(
            StatusCode::kUnknown, "Libyuv I420ToNV21 operation failed.",
            TfLiteSupportStatus::kImageProcessingBackendError);
      }
      break;
    }
    case FrameBuffer::Format::kGRAY: {
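      // Grayscale output only needs the luminance, so the Y plane is copied
      // directly into the single output plane.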
      libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
                        const_cast<uint8*>(output_buffer->plane(0).buffer),
                        output_buffer->plane(0).stride.row_stride_bytes,
                        output_buffer->dimension().width,
                        output_buffer->dimension().height);
      break;
    }
    case FrameBuffer::Format::kYV12:
    case FrameBuffer::Format::kYV21: {
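      // The output shares the input's planar YUV420 layout, so the Y plane and
      // the two subsampled chroma planes are copied individually.
      // GetYuvDataFromFrameBuffer resolves the U/V ordering for YV12 vs. YV21.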
      ASSIGN_OR_RETURN(FrameBuffer::YuvData output_yuv_data,
                       FrameBuffer::GetYuvDataFromFrameBuffer(*output_buffer));
      ASSIGN_OR_RETURN(
          const FrameBuffer::Dimension uv_plane_dimension,
          GetUvPlaneDimension(buffer.dimension(), buffer.format()));
      libyuv::CopyPlane(yuv_data.y_buffer, yuv_data.y_row_stride,
                        const_cast<uint8*>(output_yuv_data.y_buffer),
                        output_yuv_data.y_row_stride, buffer.dimension().width,
                        buffer.dimension().height);
      libyuv::CopyPlane(yuv_data.u_buffer, yuv_data.uv_row_stride,
                        const_cast<uint8*>(output_yuv_data.u_buffer),
                        output_yuv_data.uv_row_stride, uv_plane_dimension.width,
                        uv_plane_dimension.height);
      libyuv::CopyPlane(yuv_data.v_buffer, yuv_data.uv_row_stride,
                        const_cast<uint8*>(output_yuv_data.v_buffer),
                        output_yuv_data.uv_row_stride, uv_plane_dimension.width,
                        uv_plane_dimension.height);
      break;
    }
    default:
      return CreateStatusWithPayload(
          StatusCode::kInternal,
          absl::StrFormat("Format %i is not supported.",
                          output_buffer->format()),
          TfLiteSupportStatus::kImageProcessingError);
  }
  return absl::OkStatus();
}
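
A minimal caller-side sketch, not taken from the source: it assumes the
FrameBuffer::Create factory and the Plane/Stride/Dimension members declared in
frame_buffer.h, that GetYuvDataFromFrameBuffer accepts a three-plane YV12
layout, and that the call site can reach this helper (in the library,
conversions are normally driven through LibyuvFrameBufferUtils::Convert).
Buffer names, sizes, and the usual <vector>/<memory> includes are assumed.

// Illustrative only: dimensions, buffers, and plane layout are assumptions.
constexpr int kWidth = 640;
constexpr int kHeight = 480;
// YV12: full-resolution Y plane followed by 2x2-subsampled V and U planes.
std::vector<uint8> yv12(kWidth * kHeight * 3 / 2);
std::vector<uint8> rgb(kWidth * kHeight * 3);

const uint8* y_plane = yv12.data();
const uint8* v_plane = y_plane + kWidth * kHeight;
const uint8* u_plane = v_plane + (kWidth / 2) * (kHeight / 2);
std::vector<FrameBuffer::Plane> yv12_planes = {
    {y_plane, /*stride=*/{/*row_stride_bytes=*/kWidth, /*pixel_stride_bytes=*/1}},
    {v_plane, /*stride=*/{kWidth / 2, 1}},
    {u_plane, /*stride=*/{kWidth / 2, 1}}};
std::unique_ptr<FrameBuffer> input = FrameBuffer::Create(
    yv12_planes, {kWidth, kHeight}, FrameBuffer::Format::kYV12,
    FrameBuffer::Orientation::kTopLeft);

// Single interleaved RGB plane: 3 bytes per pixel.
std::vector<FrameBuffer::Plane> rgb_planes = {
    {rgb.data(), /*stride=*/{kWidth * 3, 3}}};
std::unique_ptr<FrameBuffer> output = FrameBuffer::Create(
    rgb_planes, {kWidth, kHeight}, FrameBuffer::Format::kRGB,
    FrameBuffer::Orientation::kTopLeft);

absl::Status status = ConvertFromYv(*input, output.get());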