optimizer

OptimizerHook

Bases: BaseHook

  1. Executes the wrapped hook
  2. Zeros all gradients
  3. Backpropagates the loss
  4. Steps the optimizer
Source code in pytorch_adapt\hooks\optimizer.py
class OptimizerHook(BaseHook):
    """
    1. Executes the wrapped hook
    2. Zeros all gradients
    3. Backpropagates the loss
    4. Steps the optimizer
    """

    def __init__(
        self,
        hook: BaseHook,
        optimizers: Union[List[torch.optim.Optimizer], List[str]],
        weighter: BaseWeighter = None,
        reducer: BaseReducer = None,
        **kwargs
    ):
        """
        Arguments:
            hook: the hook that computes the losses
            optimizers: either a list of optimizers that will be used
                to update model weights, or a list of optimizer names.
                If it's the latter, then the optimizers must be passed
                into the hook as one of the ```inputs```.
            weighter: weights the returned losses and outputs a
                single value on which ```.backward()``` is called.
                If ```None```, then it defaults to
                [```MeanWeighter```][pytorch_adapt.weighters.MeanWeighter].
            reducer: a hook that reduces any unreduced losses to a single value.
                If ```None```, then it defaults to
                [```MeanReducer```][pytorch_adapt.hooks.MeanReducer].
        """
        super().__init__(**kwargs)
        self.hook = hook
        self.optimizers = optimizers
        self.weighter = c_f.default(weighter, MeanWeighter, {})
        self.reducer = c_f.default(reducer, MeanReducer, {})
        self.loss_components = {}

    def call(self, inputs, losses):
        """"""
        outputs, losses = self.hook(inputs, losses)
        combined = c_f.assert_dicts_are_disjoint(inputs, outputs)
        new_outputs, losses = self.reducer(combined, losses)
        outputs.update(new_outputs)
        loss, self.loss_components = self.weighter(losses)
        optimizers = self.optimizers
        if isinstance(optimizers[0], str):
            optimizers = c_f.extract(inputs, optimizers)
        c_f.zero_back_step(loss, optimizers, inputs.get("custom_backward"))
        return outputs, {}

    def _loss_keys(self):
        """"""
        return []

    def _out_keys(self):
        """"""
        return c_f.join_lists([self.hook.out_keys, self.reducer.out_keys])

    def extra_repr(self):
        return c_f.extra_repr(self, ["optimizers", "weighter"])
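
A minimal usage sketch, to make the four steps listed above concrete. Everything named here other than BaseHook, OptimizerHook, and standard torch calls is made up for the example: SrcClassifierLossHook, the input keys ("classifier", "src_imgs", "src_labels"), and the tensor shapes are hypothetical.

import torch

from pytorch_adapt.hooks import BaseHook, OptimizerHook

# Hypothetical loss hook following the BaseHook pattern shown above:
# it reads a model and a batch from the inputs dict and returns one named loss.
class SrcClassifierLossHook(BaseHook):
    def call(self, inputs, losses):
        logits = inputs["classifier"](inputs["src_imgs"])
        loss = torch.nn.functional.cross_entropy(logits, inputs["src_labels"])
        return {}, {"c_loss": loss}

    def _loss_keys(self):
        return ["c_loss"]

    def _out_keys(self):
        return []

classifier = torch.nn.Linear(128, 10)
opt = torch.optim.SGD(classifier.parameters(), lr=0.1)

# One iteration: the wrapped hook computes "c_loss", the default MeanWeighter
# combines the losses into a single value, and the optimizer is zeroed,
# backpropagated through, and stepped.
opt_hook = OptimizerHook(SrcClassifierLossHook(), [opt])
inputs = {
    "classifier": classifier,
    "src_imgs": torch.randn(32, 128),
    "src_labels": torch.randint(0, 10, (32,)),
}
outputs, losses = opt_hook(inputs, {})
print(opt_hook.loss_components)  # the weighted loss components from this step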

__init__(hook, optimizers, weighter=None, reducer=None, **kwargs)

Parameters:

hook (BaseHook, required): the hook that computes the losses.

optimizers (Union[List[torch.optim.Optimizer], List[str]], required): either a list of optimizers that will be used to update model weights, or a list of optimizer names. If it's the latter, then the optimizers must be passed into the hook as one of the inputs.

weighter (BaseWeighter, default None): weights the returned losses and outputs a single value on which .backward() is called. If None, then it defaults to MeanWeighter.

reducer (BaseReducer, default None): a hook that reduces any unreduced losses to a single value. If None, then it defaults to MeanReducer.
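
When optimizer names are passed instead of optimizer objects, the call method shown above looks the actual optimizers up in inputs (via c_f.extract), so each name must also be a key of inputs. A short sketch, reusing the hypothetical SrcClassifierLossHook and opt from the example above; the key "C_opt" is an arbitrary name chosen here.

# "C_opt" is only a key: the optimizer object itself must be present
# in the inputs dict under that same key when the hook is called.
opt_hook = OptimizerHook(SrcClassifierLossHook(), ["C_opt"])
inputs["C_opt"] = opt
outputs, losses = opt_hook(inputs, {})
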
Source code in pytorch_adapt\hooks\optimizer.py
def __init__(
    self,
    hook: BaseHook,
    optimizers: Union[List[torch.optim.Optimizer], List[str]],
    weighter: BaseWeighter = None,
    reducer: BaseReducer = None,
    **kwargs
):
    """
    Arguments:
        hook: the hook that computes the losses
        optimizers: either a list of optimizers that will be used
            to update model weights, or a list of optimizer names.
            If it's the latter, then the optimizers must be passed
            into the hook as one of the ```inputs```.
        weighter: weights the returned losses and outputs a
            single value on which ```.backward()``` is called.
            If ```None```, then it defaults to
            [```MeanWeighter```][pytorch_adapt.weighters.MeanWeighter].
        reducer: a hook that reduces any unreduced losses to a single value.
            If ```None```, then it defaults to
            [```MeanReducer```][pytorch_adapt.hooks.MeanReducer].
    """
    super().__init__(**kwargs)
    self.hook = hook
    self.optimizers = optimizers
    self.weighter = c_f.default(weighter, MeanWeighter, {})
    self.reducer = c_f.default(reducer, MeanReducer, {})
    self.loss_components = {}

SummaryHook

Bases: BaseHook

Repackages losses into a dictionary format useful for logging. This should be used only at the very end of each iteration, i.e. it should be the last sub-hook in a ChainHook.

Source code in pytorch_adapt\hooks\optimizer.py
class SummaryHook(BaseHook):
    """
    Repackages losses into a dictionary format useful for logging.
    This should be used only at the very end of each
    iteration, i.e. it should be the last sub-hook
    in a [ChainHook][pytorch_adapt.hooks.ChainHook].
    """

    def __init__(self, optimizers: Dict[str, OptimizerHook], **kwargs):
        """
        Arguments:
            optimizers: A dictionary of optimizer hooks.
                The losses computed inside these hooks
                will be packaged into nested dictionaries.
        """
        super().__init__(**kwargs)
        self.optimizers = optimizers

    def call(self, inputs, losses):
        """"""
        losses = {}
        for k, v in self.optimizers.items():
            losses[k] = v.loss_components
        return {}, losses

    def _loss_keys(self):
        """"""
        return list(self.optimizers.keys())

    def _out_keys(self):
        """"""
        return []
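
A sketch of the intended placement: the SummaryHook goes last in the ChainHook, so it runs after every OptimizerHook has recorded its loss_components for the iteration. It reuses opt_hook and inputs from the OptimizerHook example above, assumes ChainHook accepts its sub-hooks as positional arguments, and the name "c_opt" is arbitrary.

from pytorch_adapt.hooks import ChainHook, SummaryHook

# The SummaryHook runs after opt_hook, so the losses it returns repackage
# opt_hook.loss_components under the key "c_opt" for logging.
chain = ChainHook(opt_hook, SummaryHook({"c_opt": opt_hook}))
outputs, losses = chain(inputs, {})
# losses == {"c_opt": opt_hook.loss_components}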

__init__(optimizers, **kwargs)

Parameters:

optimizers (Dict[str, OptimizerHook], required): A dictionary of optimizer hooks. The losses computed inside these hooks will be packaged into nested dictionaries.
Source code in pytorch_adapt\hooks\optimizer.py
def __init__(self, optimizers: Dict[str, OptimizerHook], **kwargs):
    """
    Arguments:
        optimizers: A dictionary of optimizer hooks.
            The losses computed inside these hooks
            will be packaged into nested dictionaries.
    """
    super().__init__(**kwargs)
    self.optimizers = optimizers