public Object deserialize(Writable blob)

in hologres-connector-hive-base/src/main/java/com/alibaba/hologres/hive/HoloSerDe.java [274:371]


    /**
     * Deserializes one row read from Hologres into a list of Hive-native objects.
     *
     * <p>The input must be a {@link MapWritable} keyed by Hive column name. Each value is
     * converted to the Java type expected for the corresponding Hive primitive category.
     * Both an absent key and an explicit {@link NullWritable} are mapped to SQL NULL.
     *
     * @param blob the serialized row; must be a {@code MapWritable}
     * @return the reusable {@code row} list populated with one deserialized value per column
     * @throws SerDeException if {@code blob} is not a {@code MapWritable}, if the SerDe was
     *     initialized without columns, or if a column has an unsupported primitive type
     * @throws IllegalArgumentException if a column is an array type (reading arrays is not
     *     supported yet)
     */
    public Object deserialize(Writable blob) throws SerDeException {
        LOGGER.debug("Deserializing from SerDe");
        if (!(blob instanceof MapWritable)) {
            throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
        }

        if (row == null || hiveColumnTypes == null) {
            throw new SerDeException("Holo SerDe has no columns to deserialize");
        }

        row.clear();
        MapWritable input = (MapWritable) blob;
        Text columnKey = new Text();
        for (int i = 0; i < hiveColumnCount; i++) {
            columnKey.set(hiveColumnNames[i]);
            Writable value = input.get(columnKey);

            // A missing key yields null from MapWritable.get(); the original code only
            // checked for NullWritable and would NPE on value.toString() below.
            // Both cases represent SQL NULL.
            if (value == null || value == NullWritable.get()) {
                row.add(null);
                continue;
            }

            if (hiveColumnTypes[i].getCategory() == Category.LIST) {
                // Reading array columns is not implemented yet. The original code held a
                // switch of commented-out element-type cases whose default always threw;
                // throw directly instead so the branch can never fall through into the
                // primitive cast below (which would ClassCastException on a ListTypeInfo).
                String arrayElementTypeName =
                        ((ListTypeInfo) hiveColumnTypes[i])
                                .getListElementTypeInfo()
                                .getTypeName();
                throw new IllegalArgumentException(
                        String.format(
                                "Does not support read array now, array element type %s , column name %s!",
                                arrayElementTypeName, hiveColumnNames[i]));
            }

            PrimitiveCategory columnType =
                    ((PrimitiveTypeInfo) hiveColumnTypes[i]).getPrimitiveCategory();
            switch (columnType) {
                case BYTE:
                    row.add(Byte.valueOf(value.toString()));
                    break;
                case SHORT:
                    row.add(Short.valueOf(value.toString()));
                    break;
                case INT:
                    row.add(Integer.valueOf(value.toString()));
                    break;
                case LONG:
                    row.add(Long.valueOf(value.toString()));
                    break;
                case FLOAT:
                    row.add(Float.valueOf(value.toString()));
                    break;
                case DOUBLE:
                    row.add(Double.valueOf(value.toString()));
                    break;
                case DECIMAL:
                    row.add(new HiveDecimalWritable(value.toString()).getHiveDecimal());
                    break;
                case BOOLEAN:
                    row.add(Boolean.valueOf(value.toString()));
                    break;
                case CHAR:
                case VARCHAR:
                case STRING:
                    // value.toString() already yields a String; the original wrapped it in
                    // a redundant String.valueOf(...) call.
                    row.add(value.toString());
                    break;
                case DATE:
                    row.add(java.sql.Date.valueOf(value.toString()));
                    break;
                case TIMESTAMP:
                    row.add(java.sql.Timestamp.valueOf(value.toString()));
                    break;
                case BINARY:
                    // NOTE(review): round-tripping binary through toString()/getBytes is
                    // only lossless if the writable's text form is UTF-8 — confirm against
                    // the writer side.
                    row.add(value.toString().getBytes(StandardCharsets.UTF_8));
                    break;
                default:
                    throw new SerDeException(
                            String.format(
                                    "hologres connector not support type %s, column name %s",
                                    columnType.name(), hiveColumnNames[i]));
            }
        }
        return row;
    }