flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/read/HiveVectorizedOrcSplitReader.java [49:76]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            String hiveVersion,
            JobConf jobConf,
            String[] fieldNames,
            DataType[] fieldTypes,
            int[] selectedFields,
            HiveTableInputSplit split)
            throws IOException {
        // The storage descriptor carries the partition's SerDe and location metadata.
        StorageDescriptor sd = split.getHiveTablePartition().getStorageDescriptor();

        // Overlay the SerDe properties on a copy of the job configuration.
        Configuration conf = new Configuration(jobConf);
        sd.getSerdeInfo().getParameters().forEach(conf::set);

        // Vectorized reading requires a file-based split.
        InputSplit hadoopSplit = split.getHadoopInputSplit();
        FileSplit fileSplit;
        if (hadoopSplit instanceof FileSplit) {
            fileSplit = (FileSplit) hadoopSplit;
        } else {
            throw new IllegalArgumentException("Unknown split type: " + hadoopSplit);
        }

        // Resolve the values of the partition columns for this split.
        Map<String, Object> partitionValues =
                HivePartitionUtils.parsePartitionValues(
                        split.getHiveTablePartition().getPartitionSpec(),
                        fieldNames,
                        fieldTypes,
                        JobConfUtils.getDefaultPartitionName(jobConf),
                        HiveShimLoader.loadHiveShim(hiveVersion));
        this.reader =
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/read/HiveVectorizedParquetSplitReader.java [47:74]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            String hiveVersion,
            JobConf jobConf,
            String[] fieldNames,
            DataType[] fieldTypes,
            int[] selectedFields,
            HiveTableInputSplit split)
            throws IOException {
        // The storage descriptor carries the partition's SerDe and location metadata.
        StorageDescriptor sd = split.getHiveTablePartition().getStorageDescriptor();

        // Overlay the SerDe properties on a copy of the job configuration.
        Configuration conf = new Configuration(jobConf);
        sd.getSerdeInfo().getParameters().forEach(conf::set);

        // Vectorized reading requires a file-based split.
        InputSplit hadoopSplit = split.getHadoopInputSplit();
        FileSplit fileSplit;
        if (hadoopSplit instanceof FileSplit) {
            fileSplit = (FileSplit) hadoopSplit;
        } else {
            throw new IllegalArgumentException("Unknown split type: " + hadoopSplit);
        }

        // Resolve the values of the partition columns for this split.
        Map<String, Object> partitionValues =
                HivePartitionUtils.parsePartitionValues(
                        split.getHiveTablePartition().getPartitionSpec(),
                        fieldNames,
                        fieldTypes,
                        JobConfUtils.getDefaultPartitionName(jobConf),
                        HiveShimLoader.loadHiveShim(hiveVersion));
        this.reader =
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
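
The two constructors above are line-for-line identical up to the truncated reader assignment; only the format-specific reader built after that point differs (vectorized ORC vs. Parquet). A minimal sketch of how the shared preamble could be factored out follows. SplitReaderContext and its create method are hypothetical names, not existing connector API, and the import paths assume the connector's package layout:

package org.apache.flink.connectors.hive.read;

// Hypothetical helper, not part of the connector. Import paths follow the
// connector's package layout and may need adjusting for a given Flink version.
import org.apache.flink.connectors.hive.util.HivePartitionUtils;
import org.apache.flink.connectors.hive.util.JobConfUtils;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.table.types.DataType;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;

import java.util.Map;

/**
 * Bundles the per-split state that both vectorized split readers derive
 * before constructing their format-specific reader.
 */
final class SplitReaderContext {

    final Configuration conf;
    final FileSplit fileSplit;
    final Map<String, Object> partitionValues;

    private SplitReaderContext(
            Configuration conf, FileSplit fileSplit, Map<String, Object> partitionValues) {
        this.conf = conf;
        this.fileSplit = fileSplit;
        this.partitionValues = partitionValues;
    }

    /** Mirrors the shared constructor preamble of the ORC and Parquet readers. */
    static SplitReaderContext create(
            String hiveVersion,
            JobConf jobConf,
            String[] fieldNames,
            DataType[] fieldTypes,
            HiveTableInputSplit split) {
        StorageDescriptor sd = split.getHiveTablePartition().getStorageDescriptor();

        // Overlay the SerDe properties on a copy of the job configuration.
        Configuration conf = new Configuration(jobConf);
        sd.getSerdeInfo().getParameters().forEach(conf::set);

        // Vectorized reading requires a file-based split.
        InputSplit hadoopSplit = split.getHadoopInputSplit();
        if (!(hadoopSplit instanceof FileSplit)) {
            throw new IllegalArgumentException("Unknown split type: " + hadoopSplit);
        }

        // Resolve the values of the partition columns for this split.
        Map<String, Object> partitionValues =
                HivePartitionUtils.parsePartitionValues(
                        split.getHiveTablePartition().getPartitionSpec(),
                        fieldNames,
                        fieldTypes,
                        JobConfUtils.getDefaultPartitionName(jobConf),
                        HiveShimLoader.loadHiveShim(hiveVersion));

        return new SplitReaderContext(conf, (FileSplit) hadoopSplit, partitionValues);
    }
}

With such a helper, each constructor would reduce to a single SplitReaderContext.create(...) call followed by its format-specific reader construction.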