hudi-flink-datasource/hudi-flink1.14.x/src/main/java/org/apache/hudi/table/format/cow/vector/reader/BaseVectorizedColumnReader.java [167:201]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  /**
   * Prepares {@code dataColumn} to decode the data section of a freshly opened page.
   *
   * <p>Plain-encoded pages get a direct values reader for the column's primitive type;
   * dictionary-encoded pages wrap a dictionary-based values reader and therefore require
   * that the dictionary page was read beforehand.
   *
   * @param dataEncoding encoding of the page's data section
   * @param in input stream positioned at the start of the data section
   * @param valueCount number of values stored in the page
   * @throws IOException if the page is dictionary encoded but no dictionary was loaded,
   *     or if the reader fails to initialize from the page bytes
   */
  private void initDataReader(Encoding dataEncoding, ByteBufferInputStream in, int valueCount)
      throws IOException {
    this.pageValueCount = valueCount;
    this.endOfPageValueCount = valuesRead + pageValueCount;
    if (!dataEncoding.usesDictionary()) {
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getValuesReader(descriptor, VALUES),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = false;
    } else {
      // Clear the stale reader first so a missing dictionary never leaves a reader
      // from a previous page behind.
      this.dataColumn = null;
      if (dictionary == null) {
        throw new IOException(
            "could not read page in col "
                + descriptor
                + " as the dictionary was missing for encoding "
                + dataEncoding);
      }
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getDictionaryBasedValuesReader(
                  descriptor, VALUES, dictionary.getDictionary()),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = true;
    }

    // Re-wrap decode failures so the error names the offending column.
    try {
      dataColumn.initFromPage(pageValueCount, in);
    } catch (IOException e) {
      throw new IOException("could not read page in col " + descriptor, e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



hudi-flink-datasource/hudi-flink1.17.x/src/main/java/org/apache/hudi/table/format/cow/vector/reader/BaseVectorizedColumnReader.java [167:201]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  /**
   * Sets up {@code dataColumn} for decoding the data section of the page just opened.
   *
   * <p>For dictionary encodings a dictionary-based values reader is built, which requires
   * the dictionary page to have been loaded already; otherwise a plain values reader for
   * the column's primitive type is used.
   *
   * @param dataEncoding encoding of the page's data section
   * @param in input stream positioned at the start of the data section
   * @param valueCount number of values stored in the page
   * @throws IOException if the page is dictionary encoded but no dictionary is available,
   *     or if initializing the reader from the page bytes fails
   */
  private void initDataReader(Encoding dataEncoding, ByteBufferInputStream in, int valueCount)
      throws IOException {
    this.pageValueCount = valueCount;
    this.endOfPageValueCount = valuesRead + pageValueCount;
    if (!dataEncoding.usesDictionary()) {
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getValuesReader(descriptor, VALUES),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = false;
    } else {
      // Drop any reader left over from the previous page before validating the dictionary.
      this.dataColumn = null;
      if (dictionary == null) {
        throw new IOException(
            "could not read page in col "
                + descriptor
                + " as the dictionary was missing for encoding "
                + dataEncoding);
      }
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getDictionaryBasedValuesReader(
                  descriptor, VALUES, dictionary.getDictionary()),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = true;
    }

    // Translate low-level decode failures into an error that names the column.
    try {
      dataColumn.initFromPage(pageValueCount, in);
    } catch (IOException e) {
      throw new IOException("could not read page in col " + descriptor, e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



hudi-flink-datasource/hudi-flink1.16.x/src/main/java/org/apache/hudi/table/format/cow/vector/reader/BaseVectorizedColumnReader.java [167:201]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  /**
   * Initializes {@code dataColumn} so the data section of the current page can be decoded.
   *
   * <p>Dictionary-encoded pages reuse the previously loaded dictionary through a
   * dictionary-based values reader; plain pages use a direct values reader for the
   * column's primitive type.
   *
   * @param dataEncoding encoding of the page's data section
   * @param in input stream positioned at the start of the data section
   * @param valueCount number of values stored in the page
   * @throws IOException if a dictionary-encoded page has no dictionary loaded, or if the
   *     reader cannot be initialized from the page bytes
   */
  private void initDataReader(Encoding dataEncoding, ByteBufferInputStream in, int valueCount)
      throws IOException {
    this.pageValueCount = valueCount;
    this.endOfPageValueCount = valuesRead + pageValueCount;
    if (!dataEncoding.usesDictionary()) {
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getValuesReader(descriptor, VALUES),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = false;
    } else {
      // Reset the reader up front so a failed dictionary check cannot leave a stale one.
      this.dataColumn = null;
      if (dictionary == null) {
        throw new IOException(
            "could not read page in col "
                + descriptor
                + " as the dictionary was missing for encoding "
                + dataEncoding);
      }
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getDictionaryBasedValuesReader(
                  descriptor, VALUES, dictionary.getDictionary()),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = true;
    }

    // Wrap failures so the exception message identifies the column being read.
    try {
      dataColumn.initFromPage(pageValueCount, in);
    } catch (IOException e) {
      throw new IOException("could not read page in col " + descriptor, e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



hudi-flink-datasource/hudi-flink1.18.x/src/main/java/org/apache/hudi/table/format/cow/vector/reader/BaseVectorizedColumnReader.java [167:201]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  /**
   * Builds the values reader for the data section of a newly opened page and stores it
   * in {@code dataColumn}.
   *
   * <p>A dictionary encoding demands that the dictionary page was already read; plain
   * encodings only need a direct values reader for the column's primitive type.
   *
   * @param dataEncoding encoding of the page's data section
   * @param in input stream positioned at the start of the data section
   * @param valueCount number of values stored in the page
   * @throws IOException if the dictionary is missing for a dictionary-encoded page, or
   *     if the reader fails to initialize from the page bytes
   */
  private void initDataReader(Encoding dataEncoding, ByteBufferInputStream in, int valueCount)
      throws IOException {
    this.pageValueCount = valueCount;
    this.endOfPageValueCount = valuesRead + pageValueCount;
    if (!dataEncoding.usesDictionary()) {
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getValuesReader(descriptor, VALUES),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = false;
    } else {
      // Null out the previous page's reader before the dictionary check, so failure
      // leaves no stale reader behind.
      this.dataColumn = null;
      if (dictionary == null) {
        throw new IOException(
            "could not read page in col "
                + descriptor
                + " as the dictionary was missing for encoding "
                + dataEncoding);
      }
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getDictionaryBasedValuesReader(
                  descriptor, VALUES, dictionary.getDictionary()),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = true;
    }

    // Rethrow with the column descriptor so the failing column is identifiable.
    try {
      dataColumn.initFromPage(pageValueCount, in);
    } catch (IOException e) {
      throw new IOException("could not read page in col " + descriptor, e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



hudi-flink-datasource/hudi-flink1.15.x/src/main/java/org/apache/hudi/table/format/cow/vector/reader/BaseVectorizedColumnReader.java [167:201]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  /**
   * Configures {@code dataColumn} to decode the data section of the page that was just
   * opened.
   *
   * <p>Dictionary-encoded pages require the dictionary page to have been loaded first
   * and use a dictionary-based values reader; plain pages use a direct values reader
   * for the column's primitive type.
   *
   * @param dataEncoding encoding of the page's data section
   * @param in input stream positioned at the start of the data section
   * @param valueCount number of values stored in the page
   * @throws IOException if no dictionary is available for a dictionary-encoded page, or
   *     if the reader cannot be initialized from the page bytes
   */
  private void initDataReader(Encoding dataEncoding, ByteBufferInputStream in, int valueCount)
      throws IOException {
    this.pageValueCount = valueCount;
    this.endOfPageValueCount = valuesRead + pageValueCount;
    if (!dataEncoding.usesDictionary()) {
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getValuesReader(descriptor, VALUES),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = false;
    } else {
      // Clear the old reader before validating, so a missing dictionary cannot leave
      // a reader from an earlier page in place.
      this.dataColumn = null;
      if (dictionary == null) {
        throw new IOException(
            "could not read page in col "
                + descriptor
                + " as the dictionary was missing for encoding "
                + dataEncoding);
      }
      this.dataColumn =
          ParquetDataColumnReaderFactory.getDataColumnReaderByType(
              type.asPrimitiveType(),
              dataEncoding.getDictionaryBasedValuesReader(
                  descriptor, VALUES, dictionary.getDictionary()),
              isUtcTimestamp);
      this.isCurrentPageDictionaryEncoded = true;
    }

    // Attach the column descriptor to any decode failure for easier diagnosis.
    try {
      dataColumn.initFromPage(pageValueCount, in);
    } catch (IOException e) {
      throw new IOException("could not read page in col " + descriptor, e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



