def _from_java()

in sagemaker-pyspark-sdk/src/sagemaker_pyspark/wrapper.py [0:0]


    @classmethod
    def _from_java(cls, java_object):
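        """Convert a py4j JavaObject coming back from the JVM into a Python value.

        SageMakerJavaWrapper subclasses convert themselves; scala.None maps to
        Python's None; anything else is delegated to pyspark's _java2py.
        """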

        # primitives and Spark data types are converted automatically by
        # _java2py(); in those cases there is nothing to do
        if type(java_object) != py4j.java_gateway.JavaObject:
            return java_object

        # construct a mapping of java/scala class names to the python
        # wrapper classes that know how to convert them
        wrapped_classes = {}
        for wrapper_cls in SageMakerJavaWrapper.__subclasses__():
            wrapped_classes[wrapper_cls._wrapped_class] = wrapper_cls

        class_name = java_object.getClass().getName()

        # SageMakerJavaWrapper classes know how to convert themselves from a
        # Java object; otherwise, hand the object over to _java2py and hope for the best.
        if class_name in wrapped_classes:
            return wrapped_classes[class_name]._from_java(java_object)
        elif class_name.startswith("scala.None"):
            return None
        else:
            sc = SparkContext._active_spark_context
            return _java2py(sc, java_object)
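
For context, the dispatch above relies on each SageMakerJavaWrapper subclass advertising the fully qualified name of the Java/Scala class it wraps in _wrapped_class and providing its own _from_java. The standalone sketch below (plain Python, no Spark or py4j required; every class and name in it is hypothetical) illustrates the same register-by-subclass dispatch pattern:

# Minimal, self-contained sketch of the registry-by-subclass dispatch used above.
# All names here are hypothetical stand-ins; no Spark or py4j is involved.

class Wrapper:
    _wrapped_class = None  # fully qualified name of the wrapped Java/Scala class

    @classmethod
    def _from_java(cls, java_object):
        # Map Java/Scala class names to the Python wrapper classes that handle them.
        registry = {sub._wrapped_class: sub for sub in Wrapper.__subclasses__()}
        class_name = java_object["class"]       # stand-in for getClass().getName()
        if class_name in registry:
            return registry[class_name]._from_java(java_object)
        return java_object                      # stand-in for the _java2py fallback


class FakeEstimatorWrapper(Wrapper):
    _wrapped_class = "com.example.FakeEstimator"   # hypothetical Scala class name

    @classmethod
    def _from_java(cls, java_object):
        return cls()       # a real wrapper would rebuild its state from java_object


if __name__ == "__main__":
    obj = {"class": "com.example.FakeEstimator"}
    print(type(Wrapper._from_java(obj)).__name__)   # -> FakeEstimatorWrapper

Because the registry is rebuilt from __subclasses__() on every call, newly imported wrapper classes participate in the dispatch without any explicit registration step, which mirrors how the real method picks up SageMakerJavaWrapper subclasses.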