amoro-format-mixed/amoro-mixed-spark/v3.5/amoro-mixed-spark-3.5/src/main/java/org/apache/amoro/spark/io/InternalRowFileAppenderFactory.java [184:231]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  public FileAppender<InternalRow> newAppender(OutputFile file, FileFormat fileFormat) {
    MetricsConfig metricsConfig = MetricsConfig.fromProperties(properties);
    try {
      switch (fileFormat) {
        case PARQUET:
          if (writeHive) {
            return AdaptHiveParquet.write(file)
                .createWriterFunc(
                    msgType -> AdaptHiveSparkParquetWriters.buildWriter(dsSchema, msgType))
                .setAll(properties)
                .metricsConfig(metricsConfig)
                .schema(writeSchema)
                .overwrite()
                .build();
          } else {
            return Parquet.write(file)
                .createWriterFunc(msgType -> SparkParquetWriters.buildWriter(dsSchema, msgType))
                .setAll(properties)
                .metricsConfig(metricsConfig)
                .schema(writeSchema)
                .overwrite()
                .build();
          }

        case AVRO:
          return Avro.write(file)
              .createWriterFunc(ignored -> new SparkAvroWriter(dsSchema))
              .setAll(properties)
              .schema(writeSchema)
              .overwrite()
              .build();

        case ORC:
          return ORC.write(file)
              .createWriterFunc(SparkOrcWriter::new)
              .setAll(properties)
              .metricsConfig(metricsConfig)
              .schema(writeSchema)
              .overwrite()
              .build();

        default:
          throw new UnsupportedOperationException("Cannot write unknown format: " + fileFormat);
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
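
For context, here is a minimal usage sketch of the appender returned by the method above, assuming an InternalRowFileAppenderFactory instance has already been configured with its write schema, data-source schema, and table properties (that setup is not part of the excerpt). The writeRows wrapper, the HadoopOutputFile location, and the choice of PARQUET are illustrative assumptions, not code from these files; only newAppender(OutputFile, FileFormat) comes from the snippet.

  import java.io.IOException;
  import org.apache.amoro.spark.io.InternalRowFileAppenderFactory;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.iceberg.FileFormat;
  import org.apache.iceberg.hadoop.HadoopOutputFile;
  import org.apache.iceberg.io.FileAppender;
  import org.apache.iceberg.io.OutputFile;
  import org.apache.spark.sql.catalyst.InternalRow;

  class AppenderUsageSketch {
    // Hypothetical caller: writes a batch of Spark InternalRows to a single data file.
    void writeRows(
        InternalRowFileAppenderFactory appenderFactory, // assumed to be fully configured
        Iterable<InternalRow> rows,
        String location) throws IOException {
      OutputFile outputFile = HadoopOutputFile.fromLocation(location, new Configuration());
      // PARQUET chosen for illustration; the factory dispatches to the Hive-adapted or
      // plain Spark Parquet writer depending on its writeHive flag, as in the switch above.
      try (FileAppender<InternalRow> appender =
          appenderFactory.newAppender(outputFile, FileFormat.PARQUET)) {
        for (InternalRow row : rows) {
          appender.add(row);
        }
      } // closing the appender finalizes the file footer and metrics
    }
  }

The same call shape applies to the byte-identical copies for Spark 3.2 and 3.3 below; only the module the factory is loaded from differs.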



amoro-format-mixed/amoro-mixed-spark/v3.2/amoro-mixed-spark-3.2/src/main/java/org/apache/amoro/spark/io/InternalRowFileAppenderFactory.java [183:230]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  public FileAppender<InternalRow> newAppender(OutputFile file, FileFormat fileFormat) {
    MetricsConfig metricsConfig = MetricsConfig.fromProperties(properties);
    try {
      switch (fileFormat) {
        case PARQUET:
          if (writeHive) {
            return AdaptHiveParquet.write(file)
                .createWriterFunc(
                    msgType -> AdaptHiveSparkParquetWriters.buildWriter(dsSchema, msgType))
                .setAll(properties)
                .metricsConfig(metricsConfig)
                .schema(writeSchema)
                .overwrite()
                .build();
          } else {
            return Parquet.write(file)
                .createWriterFunc(msgType -> SparkParquetWriters.buildWriter(dsSchema, msgType))
                .setAll(properties)
                .metricsConfig(metricsConfig)
                .schema(writeSchema)
                .overwrite()
                .build();
          }

        case AVRO:
          return Avro.write(file)
              .createWriterFunc(ignored -> new SparkAvroWriter(dsSchema))
              .setAll(properties)
              .schema(writeSchema)
              .overwrite()
              .build();

        case ORC:
          return ORC.write(file)
              .createWriterFunc(SparkOrcWriter::new)
              .setAll(properties)
              .metricsConfig(metricsConfig)
              .schema(writeSchema)
              .overwrite()
              .build();

        default:
          throw new UnsupportedOperationException("Cannot write unknown format: " + fileFormat);
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



amoro-format-mixed/amoro-mixed-spark/v3.3/amoro-mixed-spark-3.3/src/main/java/org/apache/amoro/spark/io/InternalRowFileAppenderFactory.java [184:231]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  public FileAppender<InternalRow> newAppender(OutputFile file, FileFormat fileFormat) {
    MetricsConfig metricsConfig = MetricsConfig.fromProperties(properties);
    try {
      switch (fileFormat) {
        case PARQUET:
          if (writeHive) {
            return AdaptHiveParquet.write(file)
                .createWriterFunc(
                    msgType -> AdaptHiveSparkParquetWriters.buildWriter(dsSchema, msgType))
                .setAll(properties)
                .metricsConfig(metricsConfig)
                .schema(writeSchema)
                .overwrite()
                .build();
          } else {
            return Parquet.write(file)
                .createWriterFunc(msgType -> SparkParquetWriters.buildWriter(dsSchema, msgType))
                .setAll(properties)
                .metricsConfig(metricsConfig)
                .schema(writeSchema)
                .overwrite()
                .build();
          }

        case AVRO:
          return Avro.write(file)
              .createWriterFunc(ignored -> new SparkAvroWriter(dsSchema))
              .setAll(properties)
              .schema(writeSchema)
              .overwrite()
              .build();

        case ORC:
          return ORC.write(file)
              .createWriterFunc(SparkOrcWriter::new)
              .setAll(properties)
              .metricsConfig(metricsConfig)
              .schema(writeSchema)
              .overwrite()
              .build();

        default:
          throw new UnsupportedOperationException("Cannot write unknown format: " + fileFormat);
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



