in trainer/loss.py [0:0]
def __init__(self, temperature=0.03, negative_weight=0.8, logger=None):
    """Set up loss hyperparameters and the underlying criterion.

    Args:
        temperature: Scaling factor stored for use in the loss computation
            (presumably divides the logits — confirm against forward()).
        negative_weight: Weight applied to negative-sample logits.
        logger: Optional logger instance; stored as-is.
    """
    super().__init__()
    self.logger = logger
    self.temperature = temperature
    self.negative_w = negative_weight  # Weight of negative samples logits.
    # Learnable scalar (0-dim tensor initialized to 1.0) for scaling logits.
    self.logit_scale = nn.Parameter(torch.ones([]))
    # reduction='none' keeps per-sample losses so they can be weighted later.
    self.criterion = torch.nn.CrossEntropyLoss(reduction='none')