loader/loader.py [57:79]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
						   args["max_file_per_region"],
						   args["label_mapping"],
						   args["auto_create_edge"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

def distcp(args):
	# Mirror the generated HFiles from the local /tmp/<table> staging
	# directory to the target HBase cluster's HDFS, capping mapper count
	# and per-mapper bandwidth via the -m and -bandwidth arguments.
	cmd = "hadoop distcp -overwrite -m %s -bandwidth %s /tmp/%s %s/tmp/%s" % (args["-m"], args["-bandwidth"], args["htable_name"], args["hbase_namenode"], args["htable_name"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

def chmod(args):
	# Relax permissions on the staged HFiles (running as the hdfs user)
	# so the bulk-load step can read and clean them up.
	cmd = "export HADOOP_CONF_DIR=%s; export HADOOP_USER_NAME=hdfs; hadoop fs -chmod -R 777 /tmp/%s" % (args["HADOOP_CONF_DIR"], args["htable_name"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

def load(args):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
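
Every helper in these two files repeats the same echo-run-report pattern around os.system, with commands built by string interpolation. As a minimal sketch of how that pattern could be factored out (the run_cmd helper below is hypothetical, not part of either file), a single subprocess-based wrapper preserves the behavior while keeping the shell invocation explicit:

import subprocess

def run_cmd(cmd):
	# Echo the command, run it through a shell (the helpers depend on
	# shell features such as `export VAR=...;`), then report the status.
	print(cmd)
	ret = subprocess.call(cmd, shell=True)
	print(cmd, "return", ret)
	return ret

def distcp(args):
	cmd = "hadoop distcp -overwrite -m %s -bandwidth %s /tmp/%s %s/tmp/%s" % (
		args["-m"], args["-bandwidth"], args["htable_name"],
		args["hbase_namenode"], args["htable_name"])
	return run_cmd(cmd)

With run_cmd in place, chmod and load shrink to one-line command builders in the same way.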



s2jobs/loader.py [71:93]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
						  args["max_file_per_region"],
						  args["label_mapping"],
						  args["auto_create_edge"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

def distcp(args):
	# Mirror the generated HFiles from the local /tmp/<table> staging
	# directory to the target HBase cluster's HDFS, capping mapper count
	# and per-mapper bandwidth via the -m and -bandwidth arguments.
	cmd = "hadoop distcp -overwrite -m %s -bandwidth %s /tmp/%s %s/tmp/%s" % (args["-m"], args["-bandwidth"], args["htable_name"], args["hbase_namenode"], args["htable_name"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

def chmod(args):
	# Relax permissions on the staged HFiles (running as the hdfs user)
	# so the bulk-load step can read and clean them up.
	cmd = "export HADOOP_CONF_DIR=%s; export HADOOP_USER_NAME=hdfs; hadoop fs -chmod -R 777 /tmp/%s" % (args["HADOOP_CONF_DIR"], args["htable_name"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

def load(args):
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
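
Because the two excerpts are byte-for-byte identical, the duplication itself is best removed by hoisting the helpers into one module that both scripts import. A sketch under that assumption (the module name loader_common is invented for illustration; it is not an existing file in the repo):

# loader_common.py -- shared home for the duplicated helpers
import os

def chmod(args):
	cmd = "export HADOOP_CONF_DIR=%s; export HADOOP_USER_NAME=hdfs; hadoop fs -chmod -R 777 /tmp/%s" % (args["HADOOP_CONF_DIR"], args["htable_name"])
	print(cmd)
	ret = os.system(cmd)
	print(cmd, "return", ret)
	return ret

# ... distcp and load move here unchanged ...

loader/loader.py and s2jobs/loader.py would then each reduce to their argument handling plus:

from loader_common import distcp, chmod, load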
