in parquet-cli/src/main/java/org/apache/parquet/cli/commands/ShowColumnIndexCommand.java [82:135]
public int run() throws IOException {
  // Exactly one input file is accepted.
  Preconditions.checkArgument(files != null && files.size() >= 1, "A Parquet file is required.");
  Preconditions.checkArgument(files.size() == 1, "Cannot process multiple Parquet files.");
  InputFile in = HadoopInputFile.fromPath(qualifiedPath(files.get(0)), getConf());
  // When neither flag was given, default to printing both index kinds.
  if (!showColumnIndex && !showOffsetIndex) {
    showColumnIndex = true;
    showOffsetIndex = true;
  }
  // Requested row-group indexes as strings; an empty set means "all row groups".
  Set<String> requestedRowGroups = new HashSet<>();
  if (rowGroupIndexes != null) {
    requestedRowGroups.addAll(rowGroupIndexes);
  }
  try (ParquetFileReader reader = ParquetFileReader.open(in)) {
    boolean printedAnyBlock = false;
    int currentIndex = -1;
    for (BlockMetaData block : reader.getFooter().getBlocks()) {
      ++currentIndex;
      // Skip row groups that were not explicitly requested.
      if (!requestedRowGroups.isEmpty() && !requestedRowGroups.contains(Integer.toString(currentIndex))) {
        continue;
      }
      // Blank line between consecutive row-group sections.
      if (printedAnyBlock) {
        console.info("");
      }
      printedAnyBlock = true;
      console.info("row-group {}:", currentIndex);
      for (ColumnChunkMetaData column : getColumns(block)) {
        String dotPath = column.getPath().toDotString();
        if (showColumnIndex) {
          console.info("column index for column {}:", dotPath);
          ColumnIndex columnIndex = reader.readColumnIndex(column);
          // A chunk may legitimately have no column index.
          console.info(columnIndex == null ? "NONE" : columnIndex.toString());
        }
        if (showOffsetIndex) {
          console.info("offset index for column {}:", dotPath);
          OffsetIndex offsetIndex = reader.readOffsetIndex(column);
          console.info(offsetIndex == null ? "NONE" : offsetIndex.toString());
        }
      }
    }
  }
  return 0;
}