def update_zeppelin_interpreter()

in ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/zeppelin_server.py [0:0]
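
Rebuilds the Zeppelin interpreter settings from the cluster configuration: interpreters whose group is not in the configured interpreter list are removed, and the jdbc (Hive, Hive LLAP, Spark2 Thrift, Phoenix), livy2 and spark2 interpreters are auto-configured unless excluded via exclude_interpreter_autoconfig.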


  def update_zeppelin_interpreter(self):
    import params

    config_data = self.get_interpreter_settings()
    interpreter_settings = config_data["interpreterSettings"]

    exclude_interpreter_autoconfig_list = []
    exclude_interpreter_property_groups_map = {}

    if params.exclude_interpreter_autoconfig:
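      # exclude_interpreter_autoconfig is a semicolon-separated list of interpreter
      # names, each optionally followed by a parenthesized, comma-separated list of
      # property groups to skip, e.g. "jdbc(hive-server,spark2);livy2".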
      excluded_interpreters = params.exclude_interpreter_autoconfig.strip().split(";")
      for interpreter in excluded_interpreters:
        if interpreter and interpreter.strip():
          splitted_line = interpreter.split("(")
          interpreter_name = splitted_line[0].strip()
          exclude_interpreter_autoconfig_list.append(interpreter_name)
          if len(splitted_line) > 1:
            property_groups_list = splitted_line[1].replace(")", "").strip().split(",")
            if len(property_groups_list) > 0 and property_groups_list[0]:
              exclude_interpreter_property_groups_map[interpreter_name] = (
                property_groups_list
              )

    if params.zeppelin_interpreter:
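      # When an explicit interpreter list is configured, drop every interpreter
      # whose group is not in it.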
      settings_to_delete = []
      for settings_key, interpreter in interpreter_settings.items():
        if interpreter["group"] not in params.zeppelin_interpreter:
          settings_to_delete.append(settings_key)

      for key in settings_to_delete:
        del interpreter_settings[key]

    hive_interactive_properties_key = "hive_interactive"
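    # Auto-configure each remaining interpreter unless it was excluded above.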
    for setting_key in list(interpreter_settings.keys()):
      interpreter = interpreter_settings[setting_key]
      if (
        interpreter["group"] == "jdbc"
        and interpreter["name"] == "jdbc"
        and (
          "jdbc" not in exclude_interpreter_autoconfig_list
          or "jdbc" in exclude_interpreter_property_groups_map.keys()
        )
      ):
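        # The jdbc interpreter bundles Hive, Hive LLAP, Spark2 Thrift and Phoenix
        # settings; individual property groups can be excluded via the map above.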
        interpreter["dependencies"] = []
        jdbc_property_groups = []
        if "jdbc" in exclude_interpreter_property_groups_map.keys():
          jdbc_property_groups = exclude_interpreter_property_groups_map.get("jdbc")
        if not params.hive_server_host and params.hive_server_interactive_hosts:
          hive_interactive_properties_key = "hive"

        if params.hive_server_host and "hive-server" not in jdbc_property_groups:
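          # HiveServer2: driver, default credentials, proxy-user property and URL.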
          self.storePropertyToInterpreter(
            interpreter, "hive.driver", "string", "org.apache.hive.jdbc.HiveDriver"
          )
          self.storePropertyToInterpreter(interpreter, "hive.user", "string", "hive")
          self.storePropertyToInterpreter(interpreter, "hive.password", "string", "")
          self.storePropertyToInterpreter(
            interpreter, "hive.proxy.user.property", "string", "hive.server2.proxy.user"
          )
          if params.hive_server2_support_dynamic_service_discovery:
            self.storePropertyToInterpreter(
              interpreter,
              "hive.url",
              "string",
              "jdbc:hive2://"
              + params.hive_zookeeper_quorum
              + "/;"
              + "serviceDiscoveryMode="
              + params.discovery_mode
              + ";zooKeeperNamespace="
              + params.hive_zookeeper_namespace,
            )
          else:
            self.storePropertyToInterpreter(
              interpreter,
              "hive.url",
              "string",
              "jdbc:hive2://" + params.hive_server_host + ":" + params.hive_server_port,
            )
          if "hive.splitQueries" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "hive.splitQueries", "string", "true"
            )

        if (
          params.hive_server_interactive_hosts
          and "hive-interactive" not in jdbc_property_groups
        ):
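          # HiveServer2 Interactive (LLAP): same properties under the interactive key.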
          self.storePropertyToInterpreter(
            interpreter,
            hive_interactive_properties_key + ".driver",
            "string",
            "org.apache.hive.jdbc.HiveDriver",
          )
          self.storePropertyToInterpreter(
            interpreter, hive_interactive_properties_key + ".user", "string", "hive"
          )
          self.storePropertyToInterpreter(
            interpreter, hive_interactive_properties_key + ".password", "string", ""
          )
          self.storePropertyToInterpreter(
            interpreter,
            hive_interactive_properties_key + ".proxy.user.property",
            "string",
            "hive.server2.proxy.user",
          )
          if params.hive_server2_support_dynamic_service_discovery:
            self.storePropertyToInterpreter(
              interpreter,
              hive_interactive_properties_key + ".url",
              "string",
              "jdbc:hive2://"
              + params.hive_zookeeper_quorum
              + "/;"
              + "serviceDiscoveryMode="
              + params.discovery_mode
              + ";zooKeeperNamespace="
              + params.hive_interactive_zookeeper_namespace,
            )
          else:
            self.storePropertyToInterpreter(
              interpreter,
              hive_interactive_properties_key + ".url",
              "string",
              "jdbc:hive2://"
              + params.hive_server_interactive_hosts
              + ":"
              + params.hive_server_port,
            )
          if (
            hive_interactive_properties_key + ".splitQueries"
            not in interpreter["properties"]
          ):
            self.storePropertyToInterpreter(
              interpreter,
              hive_interactive_properties_key + ".splitQueries",
              "string",
              "true",
            )

        if params.spark2_thrift_server_hosts and "spark2" not in jdbc_property_groups:
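          # Spark2 Thrift Server: the URL is assembled below from host and port plus
          # optional principal, transportMode, httpPath and ssl fragments.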
          self.storePropertyToInterpreter(
            interpreter,
            "spark2.driver",
            "string",
            "org.apache.spark-project.org.apache.hive.jdbc.HiveDriver",
          )
          self.storePropertyToInterpreter(interpreter, "spark2.user", "string", "hive")
          self.storePropertyToInterpreter(interpreter, "spark2.password", "string", "")
          self.storePropertyToInterpreter(
            interpreter,
            "spark2.proxy.user.property",
            "string",
            "hive.server2.proxy.user",
          )
          self.storePropertyToInterpreter(
            interpreter,
            "spark2.url",
            "string",
            "jdbc:hive2://"
            + params.spark2_thrift_server_hosts
            + ":"
            + params.spark2_hive_thrift_port
            + "/",
          )

          if params.spark2_hive_principal:
            self.storePropertyToInterpreter(
              interpreter,
              "spark2.url",
              "string",
              ";principal=" + params.spark2_hive_principal,
              "add",
            )
          if params.spark2_transport_mode:
            self.storePropertyToInterpreter(
              interpreter,
              "spark2.url",
              "string",
              ";transportMode=" + params.spark2_transport_mode,
              "add",
            )
          if params.spark2_http_path:
            self.storePropertyToInterpreter(
              interpreter,
              "spark2.url",
              "string",
              ";httpPath=" + params.spark2_http_path,
              "add",
            )
          if params.spark2_ssl:
            self.storePropertyToInterpreter(
              interpreter, "spark2.url", "string", ";ssl=true", "add"
            )
          if "spark2.splitQueries" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "spark2.splitQueries", "string", "true"
            )

        if (
          params.zookeeper_znode_parent
          and params.hbase_zookeeper_quorum
          and "hbase" not in jdbc_property_groups
        ):
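          # Phoenix on HBase: driver, query defaults and a jdbc:phoenix URL built
          # from the ZooKeeper quorum and znode parent.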
          self.storePropertyToInterpreter(
            interpreter,
            "phoenix.driver",
            "string",
            "org.apache.phoenix.jdbc.PhoenixDriver",
          )
          if "phoenix.hbase.client.retries.number" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "phoenix.hbase.client.retries.number", "string", "1"
            )
          if "phoenix.phoenix.query.numberFormat" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "phoenix.phoenix.query.numberFormat", "string", "#.#"
            )
          if "phoenix.user" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "phoenix.user", "string", "phoenixuser"
            )
          if "phoenix.password" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "phoenix.password", "string", ""
            )
          self.storePropertyToInterpreter(
            interpreter,
            "phoenix.url",
            "string",
            "jdbc:phoenix:"
            + params.hbase_zookeeper_quorum
            + ":"
            + params.zookeeper_znode_parent,
          )

          if "phoenix.splitQueries" not in interpreter["properties"]:
            self.storePropertyToInterpreter(
              interpreter, "phoenix.splitQueries", "string", "true"
            )

      elif (
        interpreter["group"] == "livy"
        and interpreter["name"] == "livy2"
        and "livy2" not in exclude_interpreter_autoconfig_list
      ):
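        # livy2: point Zeppelin at the Livy2 server, or drop the interpreter when
        # no server is available.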
        # Honor this Zeppelin setting if it exists
        if "zeppelin.livy.url" in params.config["configurations"]["zeppelin-site"]:
          interpreter["properties"]["zeppelin.livy.url"] = params.config[
            "configurations"
          ]["zeppelin-site"]["zeppelin.livy.url"]
        elif params.livy2_livyserver_host:
          self.storePropertyToInterpreter(
            interpreter,
            "zeppelin.livy.url",
            "string",
            params.livy2_livyserver_protocol
            + "://"
            + params.livy2_livyserver_host
            + ":"
            + params.livy2_livyserver_port,
          )
        else:
          del interpreter_settings[setting_key]

      elif (
        interpreter["group"] == "spark"
        and interpreter["name"] == "spark2"
        and "spark2" not in exclude_interpreter_autoconfig_list
      ):
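        # spark2: only configured when spark2-env is present; otherwise dropped.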
        if "spark2-env" in params.config["configurations"]:
          self.storePropertyToInterpreter(
            interpreter, "master", "string", "yarn-client"
          )
          self.storePropertyToInterpreter(
            interpreter, "SPARK_HOME", "string", "/usr/hdp/current/spark2-client/"
          )
        else:
          del interpreter_settings[setting_key]

    self.set_interpreter_settings(config_data)
    self.update_kerberos_properties()
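
The calls above rely on a storePropertyToInterpreter helper defined elsewhere in this script; the "add" mode is used to append fragments (principal, transportMode, httpPath, ssl) onto an already-stored spark2.url. A minimal sketch of what such a helper could look like, assuming the upstream Apache Ambari behaviour and the {"name", "value", "type"} property layout of Zeppelin interpreter settings (both are assumptions, not confirmed by this excerpt):

  def storePropertyToInterpreter(
    self, interpreter, property_key, property_type="string", property_value="", mode="set"
  ):
    # Sketch only: upsert a property on the interpreter. An existing value is
    # overwritten ("set") or extended ("add"); a missing property is created in
    # the {"name", "value", "type"} shape assumed above.
    properties = interpreter["properties"]
    if property_key in properties and "value" in properties[property_key]:
      if mode == "add":
        properties[property_key]["value"] += property_value
      else:
        properties[property_key]["value"] = property_value
    else:
      properties[property_key] = {
        "name": property_key,
        "value": property_value,
        "type": property_type,
      }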