in entrypoint.py [0:0]
def _xgb_train(params, dtrain, evals, num_boost_round, model_dir, is_master):
    """Run xgb.train on the given arguments with rabit initialized.

    This is our rabit execution function.

    :param params: Booster parameters dict passed through to xgb.train().
    :param dtrain: Training data (DMatrix) passed through to xgb.train().
    :param evals: Evaluation set(s) passed through to xgb.train().
    :param num_boost_round: Number of boosting rounds for xgb.train().
    :param model_dir: Directory in which the trained model is saved
        (master node only).
    :param is_master: True if current node is master host in distributed training,
        or is running single node training job.
        Note that rabit_run will include this argument.
    """
    booster = xgb.train(params=params,
                        dtrain=dtrain,
                        evals=evals,
                        num_boost_round=num_boost_round)
    if is_master:
        # Only the master persists the model so distributed workers don't
        # write the same file concurrently.
        model_location = model_dir + '/xgboost-model'
        # Context manager ensures the file handle is closed even if
        # pickling raises (the original leaked the open file object).
        with open(model_location, 'wb') as model_file:
            pkl.dump(booster, model_file)
        logging.info("Stored trained model at {}".format(model_location))