def this()

in parquet/src/main/scala/magnolify/parquet/ParquetType.scala [208:241]


    // No-arg constructor used when the WriteSupport is instantiated reflectively
    // (e.g. by ParquetOutputFormat); the ParquetType is then recovered from the
    // Configuration in init below.
    def this() = this(null)

    override def getName: String = "magnolify"

    // Assigned by the Parquet framework via prepareForWrite (outside this excerpt)
    // before any records are written.
    private var recordConsumer: RecordConsumer = null

    override def init(configuration: Configuration): hadoop.WriteSupport.WriteContext = {
      // Constructed via the no-arg constructor: recover the ParquetType from the
      // base64-serialized value stored under WriteTypeKey in the Configuration.
      if (parquetType == null) {
        parquetType = SerializationUtils.fromBase64[ParquetType[T]](configuration.get(WriteTypeKey))
      }

      val schema = Schema.message(parquetType.schema)
      val metadata = new java.util.HashMap[String, String]()

      // Optionally embed the equivalent Avro schema in the file metadata so that
      // Avro-aware readers can pick it up.
      if (parquetType.properties.writeAvroSchemaToMetadata) {
        try {
          metadata.put(
            AVRO_SCHEMA_METADATA_KEY,
            parquetType.avroSchema.toString()
          )
        } catch {
          // parquet-avro has greater schema restrictions than magnolify-parquet, e.g., parquet-avro does not
          // support Maps with non-Binary key types
          case e: IllegalArgumentException =>
            logger.warn(
              s"Writer schema `$schema` contains a type not supported by Avro schemas; will not write " +
                s"key $AVRO_SCHEMA_METADATA_KEY to file metadata",
              e
            )
        }
      }

      new hadoop.WriteSupport.WriteContext(schema, metadata)
    }
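
For context, when the no-arg constructor is used, init expects the Hadoop Configuration to already carry a base64-serialized ParquetType under WriteTypeKey. Below is a minimal caller-side sketch of that wiring; SerializationUtils.toBase64 and the helper configureWriteType are illustrative assumptions (the symmetric counterpart of the fromBase64 call above), not names confirmed by this excerpt.

    import org.apache.hadoop.conf.Configuration

    // Hypothetical helper: stash the serialized ParquetType in the job configuration
    // so that a reflectively-created WriteSupport can recover it in init.
    // SerializationUtils.toBase64 is an assumed counterpart to fromBase64.
    def configureWriteType[T](conf: Configuration, pt: ParquetType[T]): Unit =
      conf.set(WriteTypeKey, SerializationUtils.toBase64(pt))

The try/catch in init then guards only the optional Avro-schema metadata write: a schema that magnolify-parquet can handle (for example a map with non-Binary keys, as noted in the catch comment) may not be expressible as an Avro schema, in which case the metadata key is skipped with a warning rather than failing the write.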