public ParquetValueReader<?> primitive()

in amoro-format-mixed/amoro-mixed-flink/amoro-mixed-flink-common-iceberg-bridge/src/main/java/org/apache/iceberg/flink/data/AdaptHiveFlinkParquetReaders.java [191:285]


    public ParquetValueReader<?> primitive(
        org.apache.iceberg.types.Type.PrimitiveType expected, PrimitiveType primitive) {
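      // Fields absent from the expected (projected) Iceberg schema are skipped.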
      if (expected == null) {
        return null;
      }

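      // Resolve the Parquet column descriptor for the field at the current traversal path.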
      ColumnDescriptor desc = type.getColumnDescription(currentPath());

      if (primitive.getOriginalType() != null) {
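        // Map the logical (original) type annotation to a reader when the column carries one.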
        switch (primitive.getOriginalType()) {
          case ENUM:
          case JSON:
          case UTF8:
            return new StringReader(desc);
          case INT_8:
          case INT_16:
          case INT_32:
            if (expected.typeId() == Types.LongType.get().typeId()) {
              return new ParquetValueReaders.IntAsLongReader(desc);
            } else {
              return new ParquetValueReaders.UnboxedReader<>(desc);
            }
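          // TIME_MICROS is read lossily: values are truncated to Flink's millisecond time representation.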
          case TIME_MICROS:
            return new LossyMicrosToMillisTimeReader(desc);
          case TIME_MILLIS:
            return new MillisTimeReader(desc);
          case DATE:
          case INT_64:
            return new ParquetValueReaders.UnboxedReader<>(desc);
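          // Timestamps: choose the UTC-adjusted or local reader based on the Iceberg timestamp type.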
          case TIMESTAMP_MICROS:
            if (((Types.TimestampType) expected).shouldAdjustToUTC()) {
              return new MicrosToTimestampTzReader(desc);
            } else {
              return new MicrosToTimestampReader(desc);
            }
          case TIMESTAMP_MILLIS:
            if (((Types.TimestampType) expected).shouldAdjustToUTC()) {
              return new MillisToTimestampTzReader(desc);
            } else {
              return new MillisToTimestampReader(desc);
            }
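          // Decimals: the reader depends on how the unscaled value is physically stored.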
          case DECIMAL:
            DecimalLogicalTypeAnnotation decimal =
                (DecimalLogicalTypeAnnotation) primitive.getLogicalTypeAnnotation();
            switch (primitive.getPrimitiveTypeName()) {
              case BINARY:
              case FIXED_LEN_BYTE_ARRAY:
                return new BinaryDecimalReader(desc, decimal.getPrecision(), decimal.getScale());
              case INT64:
                return new LongDecimalReader(desc, decimal.getPrecision(), decimal.getScale());
              case INT32:
                return new IntegerDecimalReader(desc, decimal.getPrecision(), decimal.getScale());
              default:
                throw new UnsupportedOperationException(
                    "Unsupported base type for decimal: " + primitive.getPrimitiveTypeName());
            }
          case BSON:
            return new ParquetValueReaders.ByteArrayReader(desc);
          default:
            throw new UnsupportedOperationException(
                "Unsupported logical type: " + primitive.getOriginalType());
        }
      }

      switch (primitive.getPrimitiveTypeName()) {
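        // No logical type annotation: choose a reader from the physical primitive type alone.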
        case FIXED_LEN_BYTE_ARRAY:
        case BINARY:
          return new ParquetValueReaders.ByteArrayReader(desc);
        case INT32:
          if (expected.typeId() == org.apache.iceberg.types.Type.TypeID.LONG) {
            return new ParquetValueReaders.IntAsLongReader(desc);
          } else {
            return new ParquetValueReaders.UnboxedReader<>(desc);
          }
        case FLOAT:
          if (expected.typeId() == org.apache.iceberg.types.Type.TypeID.DOUBLE) {
            return new ParquetValueReaders.FloatAsDoubleReader(desc);
          } else {
            return new ParquetValueReaders.UnboxedReader<>(desc);
          }
        case BOOLEAN:
        case INT64:
        case DOUBLE:
          return new ParquetValueReaders.UnboxedReader<>(desc);
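        // INT96 is the legacy timestamp encoding written without a logical type by older Hive/Spark/Impala writers.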
        case INT96:
          Types.TimestampType tsMicrosType = (Types.TimestampType) expected;
          if (tsMicrosType.shouldAdjustToUTC()) {
            return new TimestampIntWithTZ96Reader(desc);
          } else {
            return new TimestampIntWithOutTZ96Reader(desc);
          }
        default:
          throw new UnsupportedOperationException("Unsupported type: " + primitive);
      }
    }