Skip to content

base_weighter

BaseWeighter

Multiplies losses by scalar values, and then reduces them to a single value.

Source code in pytorch_adapt/weighters/base_weighter.py, lines 30–69:
class BaseWeighter:
    """
    Multiplies losses by scalar values, and then reduces them to a single value.
    """

    def __init__(
        self,
        reduction: Callable[[List[torch.Tensor]], torch.Tensor],
        weights: Dict[str, float] = None,
        scale: float = 1,
    ):
        """
        Arguments:
            reduction: A function that takes in a list of losses and returns a single loss value.
            weights: A mapping from loss names to weight values. If ```None```, weights are assumed to be 1.
            scale: A scalar that every loss gets multiplied by.
        """
        self.reduction = reduction
        # c_f.default presumably substitutes the fallback ({}) when weights is
        # None, per the docstring ("If None, weights are assumed to be 1") —
        # TODO confirm against c_f.default's definition.
        self.weights = c_f.default(weights, {})
        self.scale = scale
        # Registers "weights" and "scale" as recordable attributes so that
        # logging/record-keeping utilities can pick them up.
        pml_cf.add_to_recordable_attributes(self, list_of_names=["weights", "scale"])

    def __call__(
        self, loss_dict: Dict[str, torch.Tensor]
    ) -> Tuple[torch.Tensor, Dict[str, float]]:
        """
        Arguments:
            loss_dict: A mapping from loss names to loss values.
        Returns:
            A tuple consisting of

                - the loss that .backward() can be called on

                - a dictionary of floats (detached from the autograd graph)
                        that contains the weighted loss components.
        """
        # All weighting and reduction logic is delegated to the module-level
        # weight_losses helper; this class only stores the configuration.
        return weight_losses(self.reduction, self.weights, self.scale, loss_dict)

    def __repr__(self):
        # Pretty representation showing the configured weights and scale.
        return c_f.nice_repr(self, c_f.extra_repr(self, ["weights", "scale"]), {})

__call__(loss_dict)

Parameters:

Name Type Description Default
loss_dict Dict[str, torch.Tensor]

A mapping from loss names to loss values.

required

Returns:

Type Description
Tuple[torch.Tensor, Dict[str, float]]

A tuple consisting of

  • the loss that .backward() can be called on

  • a dictionary of floats (detached from the autograd graph) that contains the weighted loss components.

Source code in pytorch_adapt/weighters/base_weighter.py, lines 52–66:
def __call__(
    self, loss_dict: Dict[str, torch.Tensor]
) -> Tuple[torch.Tensor, Dict[str, float]]:
    """
    Arguments:
        loss_dict: A mapping from loss names to loss values.
    Returns:
        A tuple consisting of

            - the loss that .backward() can be called on

            - a dictionary of floats (detached from the autograd graph)
                    that contains the weighted loss components.
    """
    # All weighting and reduction logic is delegated to the module-level
    # weight_losses helper; this method only forwards the stored configuration.
    return weight_losses(self.reduction, self.weights, self.scale, loss_dict)

__init__(reduction, weights=None, scale=1)

Parameters:

Name Type Description Default
reduction Callable[[List[torch.Tensor]], torch.Tensor]

A function that takes in a list of losses and returns a single loss value.

required
weights Dict[str, float]

A mapping from loss names to weight values. If None, weights are assumed to be 1.

None
scale float

A scalar that every loss gets multiplied by.

1
Source code in pytorch_adapt/weighters/base_weighter.py, lines 35–50:
def __init__(
    self,
    reduction: Callable[[List[torch.Tensor]], torch.Tensor],
    weights: Dict[str, float] = None,
    scale: float = 1,
):
    """
    Arguments:
        reduction: A function that takes in a list of losses and returns a single loss value.
        weights: A mapping from loss names to weight values. If ```None```, weights are assumed to be 1.
        scale: A scalar that every loss gets multiplied by.
    """
    self.reduction = reduction
    # c_f.default presumably substitutes the fallback ({}) when weights is
    # None, per the docstring ("If None, weights are assumed to be 1") —
    # TODO confirm against c_f.default's definition.
    self.weights = c_f.default(weights, {})
    self.scale = scale
    # Registers "weights" and "scale" as recordable attributes so that
    # logging/record-keeping utilities can pick them up.
    pml_cf.add_to_recordable_attributes(self, list_of_names=["weights", "scale"])