private static ColumnReader getColumnReader()

in exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReaderFactory.java [124:244]


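  /**
   * Picks the {@link ColumnReader} implementation for a column chunk by
   * dispatching first on the Parquet primitive type and then, where one is
   * present, on the converted (logical) type. Session options further refine
   * the choice for INT96 columns and for microsecond TIME/TIMESTAMP columns.
   *
   * @throws ExecutionSetupException if the combination of primitive and
   *         converted type is not supported
   */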
  private static ColumnReader<? extends ValueVector> getColumnReader(ParquetRecordReader recordReader,
      boolean fixedLength, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, ValueVector v,
      SchemaElement schemaElement, ConvertedType convertedType) throws ExecutionSetupException {
    switch (columnChunkMetaData.getPrimitiveType().getPrimitiveTypeName()) {
      case BOOLEAN:
        return new BitReader(recordReader, descriptor, columnChunkMetaData,
            fixedLength, (BitVector) v, schemaElement);
      case INT32:
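        // No converted type: treat the values as plain 32-bit ints.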
        if (convertedType == null) {
          return new ParquetFixedWidthDictionaryReaders.DictionaryIntReader(recordReader, descriptor,
              columnChunkMetaData, fixedLength, (IntVector) v, schemaElement);
        }
        switch (convertedType) {
          case DATE:
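            // Dates written by some older writers are corrupt (see DRILL-4203);
            // the detected corruption status selects the reader that compensates.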
            switch (recordReader.getDateCorruptionStatus()) {
              case META_SHOWS_CORRUPTION:
                return new FixedByteAlignedReader.CorruptDateReader(recordReader, descriptor,
                    columnChunkMetaData, fixedLength, (DateVector) v, schemaElement);
              case META_SHOWS_NO_CORRUPTION:
                return new FixedByteAlignedReader.DateReader(recordReader, descriptor, columnChunkMetaData,
                    fixedLength, (DateVector) v, schemaElement);
              case META_UNCLEAR_TEST_VALUES:
                return new FixedByteAlignedReader.CorruptionDetectingDateReader(recordReader, descriptor,
                    columnChunkMetaData, fixedLength, (DateVector) v, schemaElement);
              default:
                throw new ExecutionSetupException(
                    String.format("Issue setting up parquet reader for date type, " +
                            "unrecognized date corruption status %s. See DRILL-4203 for more info.",
                        recordReader.getDateCorruptionStatus()));
            }
          case DECIMAL:
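            // DECIMAL backed by INT32, INT64, or FIXED_LEN_BYTE_ARRAY all
            // route to the VarDecimal reader.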
            return new ParquetFixedWidthDictionaryReaders.DictionaryVarDecimalReader(recordReader,
                descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
          case TIME_MILLIS:
            return new ParquetFixedWidthDictionaryReaders.DictionaryTimeReader(recordReader, descriptor,
                columnChunkMetaData, fixedLength, (TimeVector) v, schemaElement);
          case INT_8:
          case INT_16:
          case INT_32:
            return new ParquetFixedWidthDictionaryReaders.DictionaryIntReader(recordReader, descriptor,
                columnChunkMetaData, fixedLength, (IntVector) v, schemaElement);
          case UINT_8:
          case UINT_16:
          case UINT_32:
            return new ParquetFixedWidthDictionaryReaders.DictionaryUInt4Reader(recordReader, descriptor,
                columnChunkMetaData, fixedLength, (UInt4Vector) v, schemaElement);
          default:
            throw new ExecutionSetupException("Unsupported dictionary converted type " + convertedType + " for primitive type INT32");
        }
      case INT64:
        if (convertedType == null) {
          return new ParquetFixedWidthDictionaryReaders.DictionaryBigIntReader(recordReader, descriptor,
              columnChunkMetaData, fixedLength, (BigIntVector) v, schemaElement);
        }
        switch (convertedType) {
          case INT_64:
            return new ParquetFixedWidthDictionaryReaders.DictionaryBigIntReader(recordReader, descriptor,
                columnChunkMetaData, fixedLength, (BigIntVector) v, schemaElement);
          case TIMESTAMP_MICROS:
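            // Session options decide whether microsecond timestamps (and times,
            // below) surface as raw INT64 values or as millisecond-precision vectors.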
            if (recordReader.getFragmentContext().getOptions().getBoolean(ExecConstants.PARQUET_READER_TIMESTAMP_MICROS_AS_INT64)) {
              return new ParquetFixedWidthDictionaryReaders.DictionaryBigIntReader(recordReader, descriptor,
                  columnChunkMetaData, fixedLength, (BigIntVector) v, schemaElement);
            } else {
              return new ParquetFixedWidthDictionaryReaders.DictionaryTimeStampMicrosReader(recordReader, descriptor,
                  columnChunkMetaData, fixedLength, (TimeStampVector) v, schemaElement);
            }
          case TIME_MICROS:
            if (recordReader.getFragmentContext().getOptions().getBoolean(ExecConstants.PARQUET_READER_TIME_MICROS_AS_INT64)) {
              return new ParquetFixedWidthDictionaryReaders.DictionaryBigIntReader(recordReader, descriptor,
                  columnChunkMetaData, fixedLength, (BigIntVector) v, schemaElement);
            } else {
              return new ParquetFixedWidthDictionaryReaders.DictionaryTimeMicrosReader(recordReader, descriptor,
                  columnChunkMetaData, fixedLength, (TimeVector) v, schemaElement);
            }
          case UINT_64:
            return new ParquetFixedWidthDictionaryReaders.DictionaryUInt8Reader(recordReader, descriptor,
                columnChunkMetaData, fixedLength, (UInt8Vector) v, schemaElement);
          case DECIMAL:
            return new ParquetFixedWidthDictionaryReaders.DictionaryVarDecimalReader(recordReader,
                descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
          case TIMESTAMP_MILLIS:
            return new ParquetFixedWidthDictionaryReaders.DictionaryTimeStampReader(recordReader, descriptor,
                columnChunkMetaData, fixedLength, (TimeStampVector) v, schemaElement);
          default:
            throw new ExecutionSetupException("Unsupported dictionary converted type " + convertedType + " for primitive type INT64");
        }
      case FLOAT:
        return new ParquetFixedWidthDictionaryReaders.DictionaryFloat4Reader(recordReader, descriptor,
            columnChunkMetaData, fixedLength, (Float4Vector) v, schemaElement);
      case DOUBLE:
        return new ParquetFixedWidthDictionaryReaders.DictionaryFloat8Reader(recordReader, descriptor,
            columnChunkMetaData, fixedLength, (Float8Vector) v, schemaElement);
      case FIXED_LEN_BYTE_ARRAY:
        if (convertedType != null) {
          switch (convertedType) {
            case DECIMAL:
              return new ParquetFixedWidthDictionaryReaders.DictionaryVarDecimalReader(recordReader,
                  descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
            case INTERVAL:
              return new FixedByteAlignedReader.IntervalReader(recordReader, descriptor,
                  columnChunkMetaData, fixedLength, (IntervalVector) v, schemaElement);
          }
        }
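        // Neither DECIMAL nor INTERVAL: use the chunk's page encodings to choose
        // between a dictionary-aware reader and a plain fixed-width binary reader.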
        if (!Collections.disjoint(columnChunkMetaData.getEncodings(), ColumnReader.DICTIONARY_ENCODINGS)) {
          return new ParquetFixedWidthDictionaryReaders.DictionaryFixedBinaryReader(recordReader,
              descriptor, columnChunkMetaData, fixedLength, (VarBinaryVector) v, schemaElement);
        }
        return new FixedByteAlignedReader.FixedBinaryReader(recordReader, descriptor,
            columnChunkMetaData, (VariableWidthVector) v, schemaElement);
      case INT96:
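        // INT96 is commonly used by older writers for timestamps; an option
        // controls whether it is decoded as TIMESTAMP or kept as 12-byte binary.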
        if (recordReader.getFragmentContext().getOptions().getBoolean(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)) {
          return new ParquetFixedWidthDictionaryReaders.DictionaryBinaryAsTimeStampReader(recordReader,
              descriptor, columnChunkMetaData, fixedLength, (TimeStampVector) v, schemaElement);
        } else {
          return new ParquetFixedWidthDictionaryReaders.DictionaryFixedBinaryReader(recordReader,
              descriptor, columnChunkMetaData, fixedLength, (VarBinaryVector) v, schemaElement);
        }
      default:
        throw new ExecutionSetupException("Unsupported dictionary column type " + descriptor.getPrimitiveType().getPrimitiveTypeName().name());
    }
  }
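
The encoding test on the FIXED_LEN_BYTE_ARRAY path is worth calling out: Collections.disjoint returns true only when the two collections share no element, so negating it detects whether any dictionary encoding appears among the chunk's page encodings. Below is a minimal, self-contained sketch of that check; the Encoding enum here is a hypothetical stand-in for Parquet's, not the real class.

import java.util.Collections;
import java.util.EnumSet;

public class DictionaryEncodingCheck {

  // Hypothetical stand-in for org.apache.parquet.column.Encoding.
  enum Encoding { PLAIN, RLE, PLAIN_DICTIONARY, RLE_DICTIONARY }

  // Plays the role of ColumnReader.DICTIONARY_ENCODINGS: the encodings
  // that imply dictionary-encoded pages.
  static final EnumSet<Encoding> DICTIONARY_ENCODINGS =
      EnumSet.of(Encoding.PLAIN_DICTIONARY, Encoding.RLE_DICTIONARY);

  public static void main(String[] args) {
    EnumSet<Encoding> chunkEncodings = EnumSet.of(Encoding.PLAIN, Encoding.RLE_DICTIONARY);

    // disjoint(..) is false when the sets overlap, so the negation is true
    // exactly when at least one dictionary encoding is present.
    boolean usesDictionary = !Collections.disjoint(chunkEncodings, DICTIONARY_ENCODINGS);
    System.out.println(usesDictionary); // prints: true
  }
}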