in ccmlib/dse/dse_node.py [0:0]
def make_dse_env(self, install_dir, node_path, node_ip):
    version = DseNode.get_version_from_build(node_path=node_path)
    env = os.environ.copy()
    # Small default heap sizes; callers can override via the CCM_* environment variables.
    env['MAX_HEAP_SIZE'] = os.environ.get('CCM_MAX_HEAP_SIZE', '500M')
    env['HEAP_NEWSIZE'] = os.environ.get('CCM_HEAP_NEWSIZE', '50M')
    if version < '6.0':
        env['SPARK_WORKER_MEMORY'] = os.environ.get('SPARK_WORKER_MEMORY', '1024M')
        env['SPARK_WORKER_CORES'] = os.environ.get('SPARK_WORKER_CORES', '2')
    else:
        env['ALWAYSON_SQL_LOG_DIR'] = os.path.join(node_path, 'logs')
    # Point DSE and Cassandra at the install directory and the per-node config copies.
    env['DSE_HOME'] = install_dir
    env['DSE_CONF'] = os.path.join(node_path, 'resources', 'dse', 'conf')
    env['CASSANDRA_HOME'] = os.path.join(install_dir, 'resources', 'cassandra')
    env['CASSANDRA_CONF'] = os.path.join(node_path, 'resources', 'cassandra', 'conf')
    # Config directories for the bundled components (Hive, Sqoop, Tomcat, Pig, Mahout, Spark, Shark, Gremlin console).
    env['HIVE_CONF_DIR'] = os.path.join(node_path, 'resources', 'hive', 'conf')
    env['SQOOP_CONF_DIR'] = os.path.join(node_path, 'resources', 'sqoop', 'conf')
    env['TOMCAT_HOME'] = os.path.join(node_path, 'resources', 'tomcat')
    env['TOMCAT_CONF_DIR'] = os.path.join(node_path, 'resources', 'tomcat', 'conf')
    env['PIG_CONF_DIR'] = os.path.join(node_path, 'resources', 'pig', 'conf')
    env['MAHOUT_CONF_DIR'] = os.path.join(node_path, 'resources', 'mahout', 'conf')
    env['SPARK_CONF_DIR'] = os.path.join(node_path, 'resources', 'spark', 'conf')
    env['SHARK_CONF_DIR'] = os.path.join(node_path, 'resources', 'shark', 'conf')
    env['GREMLIN_CONSOLE_CONF_DIR'] = os.path.join(node_path, 'resources', 'graph', 'gremlin-console', 'conf')
    # Spark working, scratch, and log directories live under the node directory.
    env['SPARK_WORKER_DIR'] = os.path.join(node_path, 'spark', 'worker')
    env['SPARK_LOCAL_DIRS'] = os.path.join(node_path, 'spark', '.local')
    env['SPARK_EXECUTOR_DIRS'] = os.path.join(node_path, 'spark', 'rdd')
    env['SPARK_WORKER_LOG_DIR'] = os.path.join(node_path, 'logs', 'spark', 'worker')
    env['SPARK_MASTER_LOG_DIR'] = os.path.join(node_path, 'logs', 'spark', 'master')
    env['DSE_LOG_ROOT'] = os.path.join(node_path, 'logs', 'dse')
    env['CASSANDRA_LOG_DIR'] = os.path.join(node_path, 'logs')
    env['SPARK_LOCAL_IP'] = '' + node_ip
    if version >= '5.0':
        # DSE 5.0+ ships both Hadoop 1 and Hadoop 2 client configs.
        env['HADOOP1_CONF_DIR'] = os.path.join(node_path, 'resources', 'hadoop', 'conf')
        env['HADOOP2_CONF_DIR'] = os.path.join(node_path, 'resources', 'hadoop2-client', 'conf')
    else:
        env['HADOOP_CONF_DIR'] = os.path.join(node_path, 'resources', 'hadoop', 'conf')
    return env