def log_gradient_values()

in utils.py


import os

import blocksparse as bs
import tensorflow as tf


def log_gradient_values(grads, variables, global_step, model_dir):
    """Wrap each gradient in bs.log_stats so per-step gradient statistics are
    logged to <model_dir>/grad_stats.txt."""
    loggrads = []
    with tf.name_scope("log_gradient_values"):
        for grad, param in zip(grads, variables):
            # Name each log entry after the variable and its shape,
            # e.g. "model/w_512_512" for a [512, 512] variable named "model/w".
            name = param.op.name + "_" + "_".join(
                str(x) for x in param.shape.as_list())
            loggrads.append(bs.log_stats(
                grad, step=global_step, name=name,
                logfile=os.path.join(model_dir, 'grad_stats.txt')))
    return loggrads
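
A minimal usage sketch in the TF1 graph style the function expects. The toy variable, loss, AdamOptimizer, and model_dir path below are illustrative only, and it is assumed that bs.log_stats returns the gradient tensor (with the logging side effect attached) so the result can be passed on to apply_gradients:

import tensorflow as tf

# Toy variable and loss purely for illustration.
w = tf.get_variable("w", shape=[4, 4])
loss = tf.reduce_sum(tf.square(w))

global_step = tf.train.get_or_create_global_step()
opt = tf.train.AdamOptimizer(learning_rate=1e-4)
grads, variables = zip(*opt.compute_gradients(loss))

# Wrap every gradient so its statistics are logged each step, then train as usual.
logged_grads = log_gradient_values(grads, variables, global_step, model_dir="/tmp/model")
train_op = opt.apply_gradients(zip(logged_grads, variables), global_step=global_step)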