# Extraction header (commented out — not valid Python as bare text):
# def _write_spark_magic_conf()
# in sagemaker_studio_sparkmagic_lib/sparkmagic.py [0:0]


def _write_spark_magic_conf(cluster, user_name, skip_krb, spark_magic_override_path):
    """
    Write a SparkMagic configuration file pointing at the cluster's Livy endpoint.

    Starts from the bundled sample config, fills in the Livy URL and user name,
    switches auth to Kerberos when the cluster requires it, and writes the
    result either to the default SparkMagic location or to an explicit
    override path.

    example config: https://github.com/jupyter-incubator/sparkmagic/blob/master/sparkmagic/example_config.json

    :param cluster: EMR cluster descriptor; provides the primary node private
        DNS name and the Kerberos hostname override.
    :param user_name: user name written into the Livy credentials section.
    :param skip_krb: passed through to _is_krb_cluster to decide whether
        Kerberos configuration should be applied.
    :param spark_magic_override_path: optional explicit path for the config
        file; when falsy, the default SparkMagic location is used.
    """
    here = os.path.dirname(__file__)
    with open(os.path.join(here, "data", "sample_config.json")) as f:
        basic_conf = json.load(f)

    # Livy listens on port 8998 on the cluster's primary node.
    basic_conf["kernel_python_credentials"] = {
        "username": user_name,
        "password": "",
        "url": f"http://{cluster.primary_node_private_dns_name()}:8998",
        "auth": "None",
    }
    basic_conf["ignore_ssl_errors"] = True

    if _is_krb_cluster(cluster, skip_krb):
        basic_conf["kernel_python_credentials"]["auth"] = "Kerberos"
        # kerberos default values copied from example config
        basic_conf["kerberos_auth_configuration"] = {
            "mutual_authentication": 1,
            "service": "HTTP",
            "delegate": False,
            "force_preemptive": True,
            "principal": "",
            "hostname_override": cluster.krb_hostname_override(),
            "sanitize_mutual_error_response": True,
            "send_cbt": False,
        }

    file_path = os.path.join(SPARKMAGIC_CONF_DIR, SPARKMAGIC_CONF_FILE)
    if spark_magic_override_path:
        logger.info(
            f"Using override path for SparkMagic config file: {spark_magic_override_path}"
        )
        file_path = spark_magic_override_path

    # Fix: ensure the target directory exists for BOTH paths. Previously only
    # SPARKMAGIC_CONF_DIR was created (in the non-override branch), so an
    # override path inside a missing directory made open() raise
    # FileNotFoundError.
    parent_dir = os.path.dirname(file_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)

    with open(file_path, "w") as f:
        json.dump(basic_conf, f, indent=2)