private def convertMapToJson()

in connector/src/main/scala/com/microsoft/kusto/spark/datasink/RowCSVWriterUtils.scala [166:212]


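  /**
   * Serializes a Spark MapData into a JSON object string, escaping keys and values
   * through the EscapedWriter. Map entries whose value is null are omitted.
   */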
  private def convertMapToJson(map: MapData, fieldsType: MapType, timeZone: ZoneId): String = {
    val keys = map.keyArray()
    val values = map.valueArray()

    val writer = EscapedWriter(new CharArrayWriter())

    writer.write('{')

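    // First pass: skip entries with null values and write the first non-null entry
    // without a leading comma.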
    var x = 0
    var isNull = true
    while (x < keys.numElements() && isNull) {
      isNull = values.isNullAt(x)
      if (!isNull) {
        writeMapField(x)
      }
      x += 1
    }

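    // Second pass: write each remaining non-null entry, prefixed with a comma.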
    while (x < keys.numElements()) {
      if (!values.isNullAt(x)) {
        writer.write(',')
        writeMapField(x)
      }
      x += 1
    }

    writer.write('}')

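    // Writes a single "key:value" pair at index idx, delegating both the key and the
    // value to writeField.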
    def writeMapField(idx: Int): Unit = {
      writeField(
        keys,
        fieldIndexInRow = idx,
        dataType = fieldsType.keyType,
        timeZone = timeZone,
        writer,
        nested = true)
      writer.write(':')
      writeField(
        values,
        fieldIndexInRow = idx,
        dataType = fieldsType.valueType,
        timeZone = timeZone,
        writer = writer,
        nested = true)
    }
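    // Return the JSON accumulated in the underlying CharArrayWriter.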
    writer.out.toString
  }
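
For context, the two-pass loop above exists only to control comma placement: the first non-null entry is written without a separator, and every later non-null entry is preceded by one. The self-contained sketch below applies the same idea to a plain Scala Map, using a boolean flag instead of two loops. It is illustrative only: toJsonSketch and its types are not part of the connector, and it omits the character escaping that EscapedWriter provides.

  import java.io.CharArrayWriter

  // Illustrative sketch, not part of RowCSVWriterUtils.
  def toJsonSketch(map: Map[String, Option[Int]]): String = {
    val out = new CharArrayWriter()
    out.write('{')
    var first = true
    for ((key, maybeValue) <- map; value <- maybeValue) { // entries with None values are skipped
      if (!first) out.write(',')
      out.write("\"" + key + "\":" + value)
      first = false
    }
    out.write('}')
    out.toString
  }

  // toJsonSketch(Map("a" -> Some(1), "b" -> None, "c" -> Some(3))) yields {"a":1,"c":3}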