entropy_loss

EntropyLoss

Bases: torch.nn.Module

Encourages low entropy predictions, or in other words, "confident" predictions.
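
As a rough reference, and assuming get_entropy computes the standard Shannon entropy of the softmax probabilities, the quantity minimized for each row x_i of the input is

    H(p_i) = -\sum_{c} p_{i,c} \log p_{i,c}, \qquad p_i = \mathrm{softmax}(x_i)

where the softmax step is skipped when after_softmax=True. Driving H toward zero pushes each row toward a one-hot, i.e. "confident", prediction.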

Source code in pytorch_adapt/layers/entropy_loss.py
class EntropyLoss(torch.nn.Module):
    """
    Encourages low entropy predictions, or in other words, "confident" predictions.
    """

    def __init__(self, after_softmax: bool = False, return_mean: bool = True):
        """
        Arguments:
            after_softmax: If ```True```, then the rows of the input are assumed to
                already have softmax applied to them.
            return_mean: If ```True```, the mean entropy will be returned.
                If ```False```, the entropy per row of the input will be returned.
        """
        super().__init__()
        self.after_softmax = after_softmax
        self.return_mean = return_mean

    def forward(self, logits: torch.Tensor) -> torch.Tensor:
        """
        Arguments:
            logits: Raw logits if ```self.after_softmax``` is False.
                Otherwise each row should be predictions that sum up to 1.
        """
        entropies = get_entropy(logits, self.after_softmax)
        if self.return_mean:
            return torch.mean(entropies)
        return entropies

    def extra_repr(self):
        """"""
        return c_f.extra_repr(self, ["after_softmax"])
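
A minimal usage sketch with the default settings, assuming EntropyLoss is importable from pytorch_adapt.layers:

import torch

from pytorch_adapt.layers import EntropyLoss

# a batch of 32 raw (pre-softmax) classifier outputs over 10 classes
logits = torch.randn(32, 10)

loss_fn = EntropyLoss()
loss = loss_fn(logits)  # scalar: mean per-row entropy across the batch

Lower values indicate more confident predictions, so the loss is typically added to a training objective and minimized.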

__init__(after_softmax=False, return_mean=True)

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| after_softmax | bool | If True, then the rows of the input are assumed to already have softmax applied to them. | False |
| return_mean | bool | If True, the mean entropy will be returned. If False, the entropy per row of the input will be returned. | True |
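
A sketch of how these two options change the behavior, under the same import assumption as above:

import torch

from pytorch_adapt.layers import EntropyLoss

preds = torch.softmax(torch.randn(32, 10), dim=1)  # rows already sum to 1

# the input is already softmaxed, so the internal softmax is skipped
loss = EntropyLoss(after_softmax=True)(preds)

# return one entropy value per row instead of the batch mean
entropies = EntropyLoss(return_mean=False)(torch.randn(32, 10))  # shape (32,)
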
Source code in pytorch_adapt/layers/entropy_loss.py
def __init__(self, after_softmax: bool = False, return_mean: bool = True):
    """
    Arguments:
        after_softmax: If ```True```, then the rows of the input are assumed to
            already have softmax applied to them.
        return_mean: If ```True```, the mean entropy will be returned.
            If ```False```, the entropy per row of the input will be returned.
    """
    super().__init__()
    self.after_softmax = after_softmax
    self.return_mean = return_mean

forward(logits)

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| logits | torch.Tensor | Raw logits if self.after_softmax is False. Otherwise each row should be predictions that sum up to 1. | required |
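
To illustrate what forward returns, here is a sketch comparing the per-row output against torch.distributions.Categorical, under the assumption that get_entropy computes Shannon entropy with the natural logarithm:

import torch

from pytorch_adapt.layers import EntropyLoss

logits = torch.randn(8, 5)
entropies = EntropyLoss(return_mean=False)(logits)

# Categorical(logits=...) applies softmax internally, and its .entropy()
# is -sum(p * log p) per row, so the two should match under the assumption above
reference = torch.distributions.Categorical(logits=logits).entropy()
assert torch.allclose(entropies, reference, atol=1e-6)
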
Source code in pytorch_adapt/layers/entropy_loss.py
def forward(self, logits: torch.Tensor) -> torch.Tensor:
    """
    Arguments:
        logits: Raw logits if ```self.after_softmax``` is False.
            Otherwise each row should be predictions that sum up to 1.
    """
    entropies = get_entropy(logits, self.after_softmax)
    if self.return_mean:
        return torch.mean(entropies)
    return entropies