public static ShellResult configure()

in bigtop-manager-stack/bigtop-manager-stack-bigtop/src/main/java/org/apache/bigtop/manager/stack/bigtop/v3_3_0/hadoop/HadoopSetup.java [47:219]


    /**
     * Configures Hadoop on the local host: creates the data/log/pid directories
     * required by the given component and renders all Hadoop configuration files
     * (env scripts, *-site.xml, log4j properties, workers, limits) into the
     * configured conf directory.
     *
     * @param params        stack parameters; must be a {@link HadoopParams} instance
     *                      (unconditionally cast below)
     * @param componentName the component being configured ("namenode",
     *                      "secondarynamenode", "datanode", "nodemanager");
     *                      blank/unknown names skip component-specific directories
     * @return {@link ShellResult#success()} on completion
     */
    public static ShellResult configure(Params params, String componentName) {
        log.info("Configuring Hadoop");
        HadoopParams hadoopParams = (HadoopParams) params;

        String confDir = hadoopParams.confDir();
        String hadoopUser = hadoopParams.user();
        String hadoopGroup = hadoopParams.group();
        Map<String, Object> hadoopEnv = hadoopParams.hadoopEnv();
        Map<String, Object> yarnEnv = hadoopParams.yarnEnv();
        Map<String, Object> mapredEnv = hadoopParams.mapredEnv();

        if (StringUtils.isNotBlank(componentName)) {
            // Each case must end with `break`: without it the switch falls through
            // and a namenode host would also get datanode/nodemanager directories.
            switch (componentName) {
                case "namenode": {
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getDfsNameNodeDir(), hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getDfsNameNodeCheckPointDir(),
                            hadoopUser,
                            hadoopGroup,
                            Constants.PERMISSION_755,
                            true);
                    break;
                }
                case "secondarynamenode": {
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getDfsNameNodeCheckPointDir(),
                            hadoopUser,
                            hadoopGroup,
                            Constants.PERMISSION_755,
                            true);
                    break;
                }
                case "datanode": {
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getDfsDomainSocketPathPrefix(),
                            hadoopUser,
                            hadoopGroup,
                            Constants.PERMISSION_755,
                            true);
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getDfsDataDir(), hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
                    break;
                }
                case "nodemanager": {
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getNodeManagerLogDir(),
                            hadoopUser,
                            hadoopGroup,
                            Constants.PERMISSION_755,
                            true);
                    LinuxFileUtils.createDirectories(
                            hadoopParams.getNodeManagerLocalDir(),
                            hadoopUser,
                            hadoopGroup,
                            Constants.PERMISSION_755,
                            true);
                    break;
                }
                default:
                    // Unknown components only get the common directories/files below.
                    break;
            }
        }

        // Directories needed by every Hadoop component.
        LinuxFileUtils.createDirectories(
                hadoopParams.getHadoopLogDir(), hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);
        LinuxFileUtils.createDirectories(
                hadoopParams.getHadoopPidDir(), hadoopUser, hadoopGroup, Constants.PERMISSION_755, true);

        // ulimit settings are system-wide, hence owned by root rather than the hadoop user.
        LinuxFileUtils.toFileByTemplate(
                hadoopParams.hadoopLimits(),
                MessageFormat.format("{0}/hadoop.conf", HadoopParams.LIMITS_CONF_DIR),
                Constants.ROOT_USER,
                Constants.ROOT_USER,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFileByTemplate(
                hadoopEnv.get("content").toString(),
                MessageFormat.format("{0}/hadoop-env.sh", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFile(
                ConfigType.XML,
                MessageFormat.format("{0}/core-site.xml", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.coreSite());

        LinuxFileUtils.toFile(
                ConfigType.XML,
                MessageFormat.format("{0}/hdfs-site.xml", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.hdfsSite());

        LinuxFileUtils.toFile(
                ConfigType.XML,
                MessageFormat.format("{0}/hadoop-policy.xml", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.hadoopPolicy());

        LinuxFileUtils.toFileByTemplate(
                hadoopParams.workers(),
                MessageFormat.format("{0}/workers", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFileByTemplate(
                hadoopParams.hdfsLog4j().get("content").toString(),
                MessageFormat.format("{0}/log4j.properties", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFileByTemplate(
                yarnEnv.get("content").toString(),
                MessageFormat.format("{0}/yarn-env.sh", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        // yarn-site/mapred-site use the overload that also takes the global params
        // map, so template placeholders inside property values get substituted.
        LinuxFileUtils.toFile(
                ConfigType.XML,
                MessageFormat.format("{0}/yarn-site.xml", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.yarnSite(),
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFileByTemplate(
                hadoopParams.yarnLog4j().get("content").toString(),
                MessageFormat.format("{0}/yarnservice-log4j.properties", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFileByTemplate(
                mapredEnv.get("content").toString(),
                MessageFormat.format("{0}/mapred-env.sh", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.getGlobalParamsMap());

        LinuxFileUtils.toFile(
                ConfigType.XML,
                MessageFormat.format("{0}/mapred-site.xml", confDir),
                hadoopUser,
                hadoopGroup,
                Constants.PERMISSION_644,
                hadoopParams.mapredSite(),
                hadoopParams.getGlobalParamsMap());

        log.info("Successfully configured Hadoop");
        return ShellResult.success();
    }