in connector/src/main/scala/com/microsoft/kusto/spark/datasink/RowCSVWriterUtils.scala [214:253]
  private def writeDecimalField(
      row: SpecializedGetters,
      fieldIndexInRow: Int,
      precision: Int,
      scale: Int,
      writer: Writer): Unit = {
    writer.write('"')
    // Read the unscaled value: small decimals arrive as a Long, wider ones as
    // the unscaled BigInteger's byte array.
    val (numStr: String, negative: Boolean) = if (precision <= Decimal.MAX_LONG_DIGITS) {
      val num: Long = row.getLong(fieldIndexInRow)
      (num.abs.toString, num < 0)
    } else {
      val bytes = row.getBinary(fieldIndexInRow)
      val num = new BigInteger(bytes)
      (num.abs.toString, num.signum() < 0)
    }
    // Write the digits with an explicit decimal point, avoiding scientific notation.
    var point = numStr.length - scale
    if (negative) {
      writer.write("-")
    }
    if (point <= 0) {
      // Absolute value is < 1: emit "0.", pad with zeros up to the scale, then the digits.
      writer.write('0')
      writer.write('.')
      while (point < 0) {
        writer.write('0')
        point += 1
      }
      writer.write(numStr)
    } else {
      // Insert the decimal point after `point` integer digits.
      for (i <- 0 until numStr.length) {
        if (point == i) {
          writer.write('.')
        }
        writer.write(numStr(i))
      }
    }
    writer.write('"')
  }
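
// --- Hedged usage sketch (not part of the connector source) ----------------
// writeDecimalField reads the decimal the way Spark's UnsafeRow stores it: an
// unscaled Long when precision <= Decimal.MAX_LONG_DIGITS (18), and the
// unscaled BigInteger's bytes for wider decimals. The standalone object below
// only illustrates those two representations and the expected formatted text;
// the object name and sample values are illustrative assumptions, not taken
// from the connector.
import java.math.BigInteger
import org.apache.spark.sql.types.Decimal

object DecimalStorageSketch {
  def main(args: Array[String]): Unit = {
    // Small branch: -0.050 at scale 3 is carried as the unscaled long -50.
    // writeDecimalField formats that back as -0.050 (quoted): point = 2 - 3 = -1,
    // so it emits "-", "0.", one padding "0", then the digits "50".
    val small = Decimal("-0.050")
    println(small.toUnscaledLong) // -50

    // Wide branch: precision > 18, carried as the unscaled value's bytes,
    // which the row.getBinary branch wraps back into a BigInteger.
    val wide = Decimal(BigDecimal("12345678901234567890.123"), 38, 3)
    val bytes = wide.toJavaBigDecimal.unscaledValue().toByteArray
    println(new BigInteger(bytes)) // 12345678901234567890123
  }
}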