private def convertToJson()

in kernel/src/main/scala/org/apache/toree/magic/builtin/DataFrame.scala [90:118]


  private def convertToJson(rddCode: String): CellMagicOutput = {
    // Evaluate the cell body; the interpreter returns a status plus either
    // the execution output or an ExecuteError.
    val (result, message) = kernelInterpreter.interpret(rddCode)
    result match {
      case Results.Success =>
        // Read back the variable bound to the last expression and convert it,
        // as a DataFrame, into the requested output format.
        val rddVarName = kernelInterpreter.lastExecutionVariableName.get
        kernelInterpreter.read(rddVarName).map(variableVal => {
          _dataFrameConverter.convert(
            variableVal.asInstanceOf[org.apache.spark.sql.DataFrame],
            outputType,
            limit
          ).map(output =>
            CellMagicOutput(outputTypeToMimeType -> output)
          ).get
        }).getOrElse(CellMagicOutput(MIMEType.PlainText -> DataFrameResponses.NoVariableFound(rddVarName)))
      case Results.Aborted =>
        // Interpretation was aborted; log and surface the failure as plain text.
        logger.error(DataFrameResponses.ErrorMessage(outputType, DataFrameResponses.MagicAborted))
        CellMagicOutput(
          MIMEType.PlainText -> DataFrameResponses.ErrorMessage(outputType, DataFrameResponses.MagicAborted)
        )
      case Results.Error =>
        // Interpretation failed; report the interpreter's error message.
        val error = message.right.get.asInstanceOf[ExecuteError]
        val errorMessage = DataFrameResponses.ErrorMessage(outputType, error.value)
        logger.error(errorMessage)
        CellMagicOutput(MIMEType.PlainText -> errorMessage)
      case Results.Incomplete =>
        // The cell body was syntactically incomplete and could not be evaluated.
        logger.error(DataFrameResponses.Incomplete)
        CellMagicOutput(MIMEType.PlainText -> DataFrameResponses.Incomplete)
    }
  }
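
For context, a minimal usage sketch of the %%dataframe cell magic that ends up calling this method. The option names --output and --limit and the sample DataFrame expression are assumptions for illustration, not taken from this file:

    %%dataframe --output=json --limit=10
    spark.read.json("people.json")

The last expression in the cell body is expected to evaluate to an org.apache.spark.sql.DataFrame; convertToJson reads back the interpreter variable bound to that expression and converts it with _dataFrameConverter into the MIME type matching outputType.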