override def run()

in spark-doris-connector/src/main/scala/org/apache/doris/spark/rdd/ScalaValueReader.scala [144:158]
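
This is the body of the connector's asynchronous reader thread: it repeatedly asks the Doris backend for the next batch through the Thrift client (lockClient(_.getNext(...))), advances the read offset, and hands each RowBatch to rowBatchBlockingQueue until the backend signals end of stream (eos).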


    override def run(): Unit = {
      val nextBatchParams = new TScanNextBatchParams
      nextBatchParams.setContextId(contextId)
      // Keep fetching batches from the backend until it reports end of stream.
      while (!eos.get) {
        nextBatchParams.setOffset(offset)
        val nextResult = lockClient(_.getNext(nextBatchParams))
        eos.set(nextResult.isEos)
        if (!eos.get) {
          val rowBatch = new RowBatch(nextResult, schema)
          offset += rowBatch.getReadRowCount
          // RowBatch is expected to have decoded its rows already, so closing it here
          // releases the underlying Arrow resources while the decoded rows stay available
          // to the consumer; put() blocks when the queue is full, throttling this reader.
          rowBatch.close()
          rowBatchBlockingQueue.put(rowBatch)
        }
      }
    }
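
To make the handoff explicit, below is a minimal, self-contained sketch of the same producer/consumer pattern: a background thread blocks on put() while a consumer drains the queue until end of stream. The Batch case class, the fetchNext helper, the queue capacity, and the poll timeout are illustrative assumptions, not part of the connector's API.

    import java.util.concurrent.{ArrayBlockingQueue, TimeUnit}
    import java.util.concurrent.atomic.AtomicBoolean

    object AsyncReaderSketch {
      // Illustrative stand-in for the connector's RowBatch.
      final case class Batch(rows: Seq[String])

      private val eos   = new AtomicBoolean(false)
      private val queue = new ArrayBlockingQueue[Batch](32)

      // Stand-in for lockClient(_.getNext(...)): returns None once the scan is exhausted.
      private val source = Iterator.tabulate(3)(i => Batch(Seq(s"row-$i")))
      private def fetchNext(): Option[Batch] =
        this.synchronized { if (source.hasNext) Some(source.next()) else None }

      // Producer: mirrors the run() loop above; put() blocks when the queue is full.
      private val producer = new Thread(() => {
        var done = false
        while (!done) {
          fetchNext() match {
            case Some(batch) => queue.put(batch)
            case None        => done = true; eos.set(true)
          }
        }
      })

      def main(args: Array[String]): Unit = {
        producer.start()
        // Consumer: drain until the producer reports end of stream and the queue is empty.
        while (!eos.get || !queue.isEmpty) {
          val batch = queue.poll(100, TimeUnit.MILLISECONDS)
          if (batch != null) batch.rows.foreach(println)
        }
        producer.join()
      }
    }

Assuming rowBatchBlockingQueue is bounded, put() is also what provides back-pressure: when the Spark-side consumer falls behind, the reader thread blocks instead of buffering batches without limit.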