private def makeSerializable()

in connector/src/main/scala/com/datastax/spark/connector/writer/ObjectSizeEstimator.scala [32:43]


  private def makeSerializable(obj: Any): AnyRef = {
    obj match {
      // ByteBuffers are not Java-serializable; copy their contents into a plain byte array
      case bb: ByteBuffer => ByteBufferUtil.toArray(bb)
      // Recursively convert Java and Scala collections so that nested ByteBuffers
      // (and other values needing conversion) are handled as well
      case list: java.util.List[_] => list.asScala.map(makeSerializable)
      case list: List[_] => list.map(makeSerializable)
      case set: java.util.Set[_] => set.asScala.map(makeSerializable)
      case set: Set[_] => set.map(makeSerializable)
      case map: java.util.Map[_, _] => map.asScala.map { case (k, v) => (makeSerializable(k), makeSerializable(v)) }
      case map: Map[_, _] => map.map { case (k, v) => (makeSerializable(k), makeSerializable(v)) }
      // Everything else is left unchanged, only boxed as AnyRef
      case other => other.asInstanceOf[AnyRef]
    }
  }
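
A conversion like this is typically paired with Java serialization to approximate the size of a value before writing. The helper below is a minimal sketch of that idea, not the connector's actual implementation; the name estimateSize and the use of java.io streams are assumptions made for illustration.

  import java.io.{ByteArrayOutputStream, ObjectOutputStream}

  // Hypothetical helper for illustration only: converts the object graph with
  // makeSerializable, serializes it with plain Java serialization, and returns
  // the resulting byte count as a rough size estimate.
  def estimateSize(obj: Any): Int = {
    val buffer = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buffer)
    try out.writeObject(makeSerializable(obj))  // convert first, then serialize
    finally out.close()
    buffer.size()
  }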