in spark-doris-connector/src/main/java/org/apache/doris/spark/util/ListUtils.java [49:64]
/**
 * Serializes {@code batch} into one or more line-delimited chunks appended to
 * {@code result}. If the serialized form of the whole batch is too large to
 * materialize as a byte array, the batch is split into equal sub-batches
 * (via {@code getAvgSubCollections} — presumably an even partition; confirm)
 * and each sub-batch is serialized recursively.
 *
 * @param batch         rows to serialize
 * @param result        output list that serialized chunks are appended to
 * @param lineDelimiter delimiter placed between serialized rows
 * @throws JsonProcessingException if row serialization fails
 */
public static void divideAndSerialize(List<Map<Object, Object>> batch, List<String> result, String lineDelimiter)
throws JsonProcessingException {
String serializedResult = generateSerializedResult(batch, lineDelimiter);
try {
//probe: materializing the bytes of a very large string can fail with
//"Requested array size exceeds VM limit"; success means the whole batch
//fits in a single chunk
serializedResult.getBytes("UTF-8");
result.add(serializedResult);
return;
} catch (Throwable error) {
//intentionally broad: OutOfMemoryError (not an Exception) is the expected
//failure here, and we recover from it by splitting the batch
LOG.error("getBytes error:{} ,average divide the collection", ExceptionUtils.getStackTrace(error));
}
//guard against infinite recursion: a single row that is itself too large
//cannot be split any further — emit it as one chunk instead of recursing
//forever on the same one-element batch
if (batch.size() <= 1) {
result.add(serializedResult);
return;
}
for (List<Map<Object, Object>> avgSubCollection : getAvgSubCollections(batch)) {
divideAndSerialize(avgSubCollection, result, lineDelimiter);
}
}