# remove_dataengine_kernels()
#
# in infrastructure-provisioning/src/general/lib/azure/actions_lib.py [0:0]

    def remove_dataengine_kernels(self, resource_group_name, notebook_name, os_user, key_path, cluster_name):
        """Remove dataengine (standalone Spark cluster) kernels for *cluster_name*
        from an Azure notebook VM.

        Connects to the notebook over SSH, deletes the cluster's Jupyter kernel
        specs, tears down matching Zeppelin interpreters (restarting Zeppelin),
        removes RStudio/sparkmagic dataengine artifacts, and finally deletes
        /opt/<cluster_name>. Failures are logged and reported via append_result.

        :param resource_group_name: Azure resource group containing the notebook VM
        :param notebook_name: name of the notebook instance
        :param os_user: OS user on the notebook VM
        :param key_path: path to the SSH private key for *os_user*
        :param cluster_name: dataengine cluster whose kernels are being removed
        """

        def _delete_zeppelin_interpreters(zeppelin_url):
            # Query Zeppelin's interpreter REST API and DELETE every
            # interpreter whose name contains the cluster name.
            opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
            req = opener.open(urllib.request.Request(zeppelin_url))
            interpreter_json = json.loads(req.read())
            for interpreter in interpreter_json['body']:
                if cluster_name in interpreter['name']:
                    print("Interpreter with ID: {0} and name: {1} will be removed from zeppelin!".
                          format(interpreter['id'], interpreter['name']))
                    request = urllib.request.Request(zeppelin_url + interpreter['id'], data=''.encode())
                    request.get_method = lambda: 'DELETE'
                    url = opener.open(request)
                    print(url.read())

        def _restart_zeppelin():
            # Restart the Zeppelin service and poll port 8080 until it is
            # open again (nmap reports "closed" while it is still down).
            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
            conn.sudo('systemctl daemon-reload')
            conn.sudo("service zeppelin-notebook stop")
            conn.sudo("service zeppelin-notebook start")
            zeppelin_restarted = False
            while not zeppelin_restarted:
                conn.sudo('sleep 5')
                result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?').stdout
                if result[:1] == '1':
                    zeppelin_restarted = True
            conn.sudo('sleep 5')

        try:
            private = datalab.meta_lib.AzureMeta().get_private_ip_address(resource_group_name, notebook_name)
            # conn is intentionally global: other actions in this module reuse
            # the same connection object.
            global conn
            conn = datalab.fab.init_datalab_connection(private, os_user, key_path)
            zeppelin_url = 'http://' + private + ':8080/api/interpreter/setting/'
            conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
            if exists(conn, '/home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name)):
                if os.environ['notebook_multiple_clusters'] == 'true':
                    try:
                        # Find the Livy server bound to this cluster, kill its
                        # process and disable its systemd unit.
                        livy_port = conn.sudo("cat /opt/" + cluster_name +
                                              "/livy/conf/livy.conf | grep livy.server.port | tail -n 1 | awk '{printf $3}'").stdout.replace(
                            '\n', '')
                        process_number = conn.sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
                                                   "' | awk '{print $7}' | sed 's|/.*||g'").stdout.replace('\n', '')
                        conn.sudo('kill -9 ' + process_number)
                        conn.sudo('systemctl disable livy-server-' + livy_port)
                    except Exception:
                        # Best-effort cleanup: the Livy server may already be gone.
                        # (Narrowed from a bare `except:` so SystemExit/
                        # KeyboardInterrupt are not swallowed.)
                        print("Wasn't able to find Livy server for this dataengine!")
                # Point Zeppelin back at the local Spark installation.
                conn.sudo(
                    'sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
                conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
                _delete_zeppelin_interpreters(zeppelin_url)
                _restart_zeppelin()
                conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
            if exists(conn, '/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
                datalab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
            if exists(conn, '/home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(os_user,
                                                                                                    cluster_name)):
                conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine-service_interpreter_ensure".format(os_user))
                _delete_zeppelin_interpreters(zeppelin_url)
                _restart_zeppelin()
                conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(os_user,
                                                                                                         cluster_name))
            if exists(conn, '/home/{}/.ensure_dir/hdinsight_secret_ensured'.format(os_user)):
                conn.sudo("sed -i '/-access-password/d' /home/{}/.Renviron".format(os_user))
            if exists(conn, '/home/{}/.ensure_dir/sparkmagic_kernels_ensured'.format(os_user)):
                conn.sudo('rm -rf /home/{0}/.local/share/jupyter/kernels/pysparkkernel/ '
                          '/home/{0}/.local/share/jupyter/kernels/sparkkernel/ '
                          '/home/{0}/.sparkmagic/ '
                          '/home/{0}/.ensure_dir/sparkmagic_kernels_ensured'.format(os_user))
            conn.sudo('rm -rf  /opt/' + cluster_name + '/')
            print("Notebook's {} kernels were removed".format(private))
        except Exception as err:
            # BUG FIX: traceback.print_exc() prints and returns None, so the
            # original string concatenation raised TypeError inside the handler
            # and masked the real error. format_exc() returns the traceback text.
            tb = traceback.format_exc()
            logging.info("Unable to remove kernels on Notebook: " + str(err) + "\n Traceback: " + tb)
            append_result(str({"error": "Unable to remove kernels on Notebook",
                               "error_message": str(err) + "\n Traceback: " + tb}))
            traceback.print_exc(file=sys.stdout)