in src/model.py [0:0]
import torch
from torch.autograd import Variable


def flip_attributes(attributes, params, attribute_id, new_value=None):
    """
    Flip a batch of one-hot encoded attributes: either re-sample every
    attribute at random (attribute_id == 'all'), or set a single
    attribute to the given `new_value`.
    """
    assert attributes.size(1) == params.n_attr
    # mappings[k] gives the (start, end) column span of attribute k
    mappings = get_mappings(params)
    attributes = attributes.data.clone().cpu()

    def flip_attribute(attribute_id, new_value=None):
        bs = attributes.size(0)
        i, j = mappings[attribute_id]
        # clear the current one-hot encoding of this attribute
        attributes[:, i:j].zero_()
        if new_value is None:
            # draw a random category for each element of the batch
            y = torch.LongTensor(bs).random_(j - i)
        else:
            assert new_value in range(j - i)
            y = torch.LongTensor(bs).fill_(new_value)
        # write the new one-hot value
        attributes[:, i:j].scatter_(1, y.unsqueeze(1), 1)

    if attribute_id == 'all':
        # randomly re-sample every attribute
        assert new_value is None
        for attribute_id in range(len(params.attr)):
            flip_attribute(attribute_id)
    else:
        # set a single attribute to the requested value
        assert type(new_value) is int
        flip_attribute(attribute_id, new_value)

    return Variable(attributes.cuda())
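

# Usage sketch (hypothetical, not taken from the repo): it assumes this module
# also defines get_mappings(), that a CUDA device is available (the function
# moves its output to the GPU), and that `params` exposes `n_attr` (total
# one-hot width) and `attr` (one entry per attribute), matching how they are
# used above. The attribute names and sizes below are made up for illustration.
if __name__ == '__main__':
    from argparse import Namespace

    params = Namespace(attr=[('Smiling', 2), ('Young', 2)], n_attr=4)
    bs = 8
    x = torch.zeros(bs, params.n_attr)
    x[:, 0] = 1  # 'Smiling' -> category 0
    x[:, 2] = 1  # 'Young'   -> category 0
    attrs = Variable(x.cuda())

    flipped = flip_attributes(attrs, params, 'all')           # all attributes re-sampled at random
    forced = flip_attributes(attrs, params, 1, new_value=1)   # second attribute set to category 1
    print(flipped.size(), forced.size())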