def writeRowAsCSV()

in connector/src/main/scala/com/microsoft/kusto/spark/datasink/RowCSVWriterUtils.scala [22:47]


  def writeRowAsCSV(
      row: InternalRow,
      schema: StructType,
      timeZone: ZoneId,
      writer: Writer): Unit = {
    val schemaFields: Array[StructField] = schema.fields

    // First column: written without a leading separator. Null values are
    // emitted as empty cells, so nothing is written for them.
    if (!row.isNullAt(0)) {
      writeField(
        row,
        fieldIndexInRow = 0,
        schemaFields(0).dataType,
        timeZone,
        writer,
        nested = false)
    }

    // Every subsequent column is preceded by a comma, which is written even
    // when the value is null so the column count of the record stays fixed.
    for (i <- 1 until row.numFields) {
      writer.write(',')
      if (!row.isNullAt(i)) {
        writeField(row, i, schemaFields(i).dataType, timeZone, writer, nested = false)
      }
    }

    // Terminate the CSV record.
    writer.newLine()
  }
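
A minimal standalone sketch of the same pattern is shown below. It is not the connector's API: the connector's Writer is an internal wrapper type, so the sketch substitutes a plain java.io.Writer, restricts itself to string columns, and skips quoting/escaping. The names RowCsvSketch and writeStringRowAsCsv are illustrative only.

import java.io.{StringWriter, Writer => JWriter}

import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.unsafe.types.UTF8String

object RowCsvSketch {

  // Simplified, hypothetical variant of writeRowAsCSV: string columns only,
  // plain java.io.Writer, no quoting. Mirrors the null-skip and comma
  // placement of the method above.
  def writeStringRowAsCsv(row: GenericInternalRow, schema: StructType, out: JWriter): Unit = {
    require(schema.fields.forall(_.dataType == StringType), "sketch handles StringType only")

    // First column: no leading separator; a null becomes an empty cell.
    if (!row.isNullAt(0)) out.write(row.getUTF8String(0).toString)

    // Remaining columns: always a comma, value only when non-null.
    for (i <- 1 until row.numFields) {
      out.write(',')
      if (!row.isNullAt(i)) out.write(row.getUTF8String(i).toString)
    }

    // java.io.Writer has no newLine(); write the record terminator directly.
    out.write('\n')
  }

  def main(args: Array[String]): Unit = {
    val schema = StructType(Seq(
      StructField("name", StringType),
      StructField("city", StringType)))
    val row = new GenericInternalRow(Array[Any](UTF8String.fromString("alice"), null))
    val sw = new StringWriter()
    writeStringRowAsCsv(row, schema, sw)
    print(sw.toString) // "alice," plus a newline; the null column is an empty cell
  }
}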