in connector/src/main/scala/com/microsoft/kusto/spark/datasink/RowCSVWriterUtils.scala [104:142]
/**
 * Serializes a struct-typed row as a JSON object string.
 *
 * Fields that are null in the row are omitted entirely (no `"name":null`
 * entries), and an empty schema yields the literal `"{}"`. Field names and
 * values are escaped via [[EscapedWriter]]; nested values are delegated to
 * `writeField` with `nested = true`.
 *
 * @param row      the struct value to serialize
 * @param schema   the struct's field names and types
 * @param timeZone time zone used when formatting temporal field values
 * @return a JSON object string such as `{"a":1,"b":"x"}`
 */
private def convertStructToJson(
    row: InternalRow,
    schema: StructType,
    timeZone: ZoneId): String = {
  val fields = schema.fields
  if (fields.isEmpty) {
    "{}"
  } else {
    val writer = EscapedWriter(new CharArrayWriter())

    // Emits one `"name":value` pair for the field at the given index.
    def emitField(idx: Int): Unit = {
      writer.writeStringField(fields(idx).name)
      writer.write(':')
      writeField(row, idx, fields(idx).dataType, timeZone, writer, nested = true)
    }

    writer.write('{')
    // Single pass: skip null fields, and prefix every pair after the
    // first emitted one with a comma separator.
    var idx = 0
    var emittedAny = false
    while (idx < fields.length) {
      if (!row.isNullAt(idx)) {
        if (emittedAny) {
          writer.write(',')
        }
        emitField(idx)
        emittedAny = true
      }
      idx += 1
    }
    writer.write('}')
    writer.out.toString
  }
}