amoro-format-mixed/amoro-mixed-spark/v3.5/amoro-mixed-spark-3.5/src/main/java/org/apache/amoro/spark/io/InternalRowFileAppenderFactory.java [314:372]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  public PositionDeleteWriter<InternalRow> newPosDeleteWriter(
      EncryptedOutputFile file, FileFormat format, StructLike partition) {
    try {
      switch (format) {
        case PARQUET:
          StructType sparkPosDeleteSchema =
              SparkSchemaUtil.convert(DeleteSchemaUtil.posDeleteSchema(posDeleteRowSchema));
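          // When writing a Hive-compatible (Mixed Hive) table, use the AdaptHive Parquet
          // writer; otherwise fall back to the standard Iceberg Spark Parquet writer.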
          if (writeHive) {
            return AdaptHiveParquet.writeDeletes(file.encryptingOutputFile())
                .createWriterFunc(
                    msgType ->
                        AdaptHiveSparkParquetWriters.buildWriter(sparkPosDeleteSchema, msgType))
                .overwrite()
                .rowSchema(posDeleteRowSchema)
                .withSpec(spec)
                .withPartition(partition)
                .withKeyMetadata(file.keyMetadata())
                .transformPaths(path -> UTF8String.fromString(path.toString()))
                .buildPositionWriter();
          } else {
            return Parquet.writeDeletes(file.encryptingOutputFile())
                .createWriterFunc(
                    msgType -> SparkParquetWriters.buildWriter(sparkPosDeleteSchema, msgType))
                .overwrite()
                .rowSchema(posDeleteRowSchema)
                .withSpec(spec)
                .withPartition(partition)
                .withKeyMetadata(file.keyMetadata())
                .transformPaths(path -> UTF8String.fromString(path.toString()))
                .buildPositionWriter();
          }
        case AVRO:
          return Avro.writeDeletes(file.encryptingOutputFile())
              .createWriterFunc(ignored -> new SparkAvroWriter(lazyPosDeleteSparkType()))
              .overwrite()
              .rowSchema(posDeleteRowSchema)
              .withSpec(spec)
              .withPartition(partition)
              .withKeyMetadata(file.keyMetadata())
              .buildPositionWriter();

        case ORC:
          return ORC.writeDeletes(file.encryptingOutputFile())
              .createWriterFunc(SparkOrcWriter::new)
              .overwrite()
              .rowSchema(posDeleteRowSchema)
              .withSpec(spec)
              .withPartition(partition)
              .withKeyMetadata(file.keyMetadata())
              .buildPositionWriter();

        default:
          throw new UnsupportedOperationException(
              "Cannot write pos-deletes for unsupported file format: " + format);
      }
    } catch (IOException e) {
      throw new UncheckedIOException("Failed to create new equality delete writer", e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
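
For reference, a minimal usage sketch (not part of the duplicated source) of the writer returned by newPosDeleteWriter, assuming a constructed InternalRowFileAppenderFactory, an EncryptedOutputFile, a partition value, and the path/positions of the deleted rows; delete(path, pos) appends one position delete and toDeleteFile() exposes the resulting DeleteFile metadata once the writer has been closed:

import java.io.IOException;

import org.apache.amoro.spark.io.InternalRowFileAppenderFactory;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.deletes.PositionDeleteWriter;
import org.apache.iceberg.encryption.EncryptedOutputFile;
import org.apache.spark.sql.catalyst.InternalRow;

class PosDeleteWriterUsage {
  // Sketch only: appenderFactory, outputFile, partition, dataFilePath and the
  // deleted positions are assumed to be provided by the surrounding write task.
  static DeleteFile writePosDeletes(
      InternalRowFileAppenderFactory appenderFactory,
      EncryptedOutputFile outputFile,
      StructLike partition,
      String dataFilePath,
      long[] deletedPositions) throws IOException {
    PositionDeleteWriter<InternalRow> writer =
        appenderFactory.newPosDeleteWriter(outputFile, FileFormat.PARQUET, partition);
    try {
      for (long pos : deletedPositions) {
        // file path + row position; the optional deleted-row payload is omitted here
        writer.delete(dataFilePath, pos);
      }
    } finally {
      writer.close();
    }
    return writer.toDeleteFile();
  }
}

The same sketch applies unchanged to the Spark 3.2 and 3.3 copies below, since the three methods are effectively identical apart from their line ranges.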



amoro-format-mixed/amoro-mixed-spark/v3.2/amoro-mixed-spark-3.2/src/main/java/org/apache/amoro/spark/io/InternalRowFileAppenderFactory.java [313:371]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  public PositionDeleteWriter<InternalRow> newPosDeleteWriter(
      EncryptedOutputFile file, FileFormat format, StructLike partition) {
    try {
      switch (format) {
        case PARQUET:
          StructType sparkPosDeleteSchema =
              SparkSchemaUtil.convert(DeleteSchemaUtil.posDeleteSchema(posDeleteRowSchema));
          if (writeHive) {
            return AdaptHiveParquet.writeDeletes(file.encryptingOutputFile())
                .createWriterFunc(
                    msgType ->
                        AdaptHiveSparkParquetWriters.buildWriter(sparkPosDeleteSchema, msgType))
                .overwrite()
                .rowSchema(posDeleteRowSchema)
                .withSpec(spec)
                .withPartition(partition)
                .withKeyMetadata(file.keyMetadata())
                .transformPaths(path -> UTF8String.fromString(path.toString()))
                .buildPositionWriter();
          } else {
            return Parquet.writeDeletes(file.encryptingOutputFile())
                .createWriterFunc(
                    msgType -> SparkParquetWriters.buildWriter(sparkPosDeleteSchema, msgType))
                .overwrite()
                .rowSchema(posDeleteRowSchema)
                .withSpec(spec)
                .withPartition(partition)
                .withKeyMetadata(file.keyMetadata())
                .transformPaths(path -> UTF8String.fromString(path.toString()))
                .buildPositionWriter();
          }
        case AVRO:
          return Avro.writeDeletes(file.encryptingOutputFile())
              .createWriterFunc(ignored -> new SparkAvroWriter(lazyPosDeleteSparkType()))
              .overwrite()
              .rowSchema(posDeleteRowSchema)
              .withSpec(spec)
              .withPartition(partition)
              .withKeyMetadata(file.keyMetadata())
              .buildPositionWriter();

        case ORC:
          return ORC.writeDeletes(file.encryptingOutputFile())
              .createWriterFunc(SparkOrcWriter::new)
              .overwrite()
              .rowSchema(posDeleteRowSchema)
              .withSpec(spec)
              .withPartition(partition)
              .withKeyMetadata(file.keyMetadata())
              .buildPositionWriter();

        default:
          throw new UnsupportedOperationException(
              "Cannot write pos-deletes for unsupported file format: " + format);
      }
    } catch (IOException e) {
      throw new UncheckedIOException("Failed to create new equality delete writer", e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



amoro-format-mixed/amoro-mixed-spark/v3.3/amoro-mixed-spark-3.3/src/main/java/org/apache/amoro/spark/io/InternalRowFileAppenderFactory.java [314:372]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  public PositionDeleteWriter<InternalRow> newPosDeleteWriter(
      EncryptedOutputFile file, FileFormat format, StructLike partition) {
    try {
      switch (format) {
        case PARQUET:
          StructType sparkPosDeleteSchema =
              SparkSchemaUtil.convert(DeleteSchemaUtil.posDeleteSchema(posDeleteRowSchema));
          if (writeHive) {
            return AdaptHiveParquet.writeDeletes(file.encryptingOutputFile())
                .createWriterFunc(
                    msgType ->
                        AdaptHiveSparkParquetWriters.buildWriter(sparkPosDeleteSchema, msgType))
                .overwrite()
                .rowSchema(posDeleteRowSchema)
                .withSpec(spec)
                .withPartition(partition)
                .withKeyMetadata(file.keyMetadata())
                .transformPaths(path -> UTF8String.fromString(path.toString()))
                .buildPositionWriter();
          } else {
            return Parquet.writeDeletes(file.encryptingOutputFile())
                .createWriterFunc(
                    msgType -> SparkParquetWriters.buildWriter(sparkPosDeleteSchema, msgType))
                .overwrite()
                .rowSchema(posDeleteRowSchema)
                .withSpec(spec)
                .withPartition(partition)
                .withKeyMetadata(file.keyMetadata())
                .transformPaths(path -> UTF8String.fromString(path.toString()))
                .buildPositionWriter();
          }
        case AVRO:
          return Avro.writeDeletes(file.encryptingOutputFile())
              .createWriterFunc(ignored -> new SparkAvroWriter(lazyPosDeleteSparkType()))
              .overwrite()
              .rowSchema(posDeleteRowSchema)
              .withSpec(spec)
              .withPartition(partition)
              .withKeyMetadata(file.keyMetadata())
              .buildPositionWriter();

        case ORC:
          return ORC.writeDeletes(file.encryptingOutputFile())
              .createWriterFunc(SparkOrcWriter::new)
              .overwrite()
              .rowSchema(posDeleteRowSchema)
              .withSpec(spec)
              .withPartition(partition)
              .withKeyMetadata(file.keyMetadata())
              .buildPositionWriter();

        default:
          throw new UnsupportedOperationException(
              "Cannot write pos-deletes for unsupported file format: " + format);
      }
    } catch (IOException e) {
      throw new UncheckedIOException("Failed to create new equality delete writer", e);
    }
  }
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
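
As a hedged aside on the sparkPosDeleteSchema conversion in the PARQUET branch above: with no row schema attached, Iceberg's position-delete schema carries only the reserved file_path and pos metadata columns, which SparkSchemaUtil maps to a two-field Spark StructType. A self-contained sketch (class and method names are illustrative only):

import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.Schema;
import org.apache.iceberg.spark.SparkSchemaUtil;
import org.apache.spark.sql.types.StructType;

class PosDeleteSchemaSketch {
  // Illustration: the minimal position-delete schema is file_path (string) + pos (long),
  // built from Iceberg's reserved metadata columns; when a pos-delete row schema is
  // configured (as posDeleteRowSchema above), an optional row struct is appended as well.
  static StructType pathPosSparkType() {
    Schema pathPosSchema =
        new Schema(MetadataColumns.DELETE_FILE_PATH, MetadataColumns.DELETE_FILE_POS);
    return SparkSchemaUtil.convert(pathPosSchema);
  }
}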



