Bases: BaseAdapter
Wraps ADDAHook.
|Container|Required keys|
|---|---|
|models|`["G", "C", "D"]`|
|optimizers|`["D", "T"]`|
The target model ("T") is created during initialization by deep-copying the G model.
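As an illustration, here is a minimal construction sketch. The model definitions and learning rate are placeholders (not part of the library); only the container keys come from the table above, and the `Models`/`Optimizers` container usage follows the library's standard pattern.

```python
import torch

from pytorch_adapt.adapters import ADDA
from pytorch_adapt.containers import Models, Optimizers

# Placeholder networks; shapes and layers are illustrative only.
G = torch.nn.Linear(1000, 100)  # feature generator
C = torch.nn.Linear(100, 10)    # classifier
D = torch.nn.Linear(100, 1)     # domain discriminator

models = Models({"G": G, "C": C, "D": D})
optimizers = Optimizers((torch.optim.Adam, {"lr": 1e-4}))

adapter = ADDA(models=models, optimizers=optimizers)
# "T" is added automatically as a deep copy of G.
```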
Source code in pytorch_adapt\adapters\adda.py
```python
class ADDA(BaseAdapter):
    """
    Wraps [ADDAHook][pytorch_adapt.hooks.ADDAHook].

    |Container|Required keys|
    |---|---|
    |models|```["G", "C", "D"]```|
    |optimizers|```["D", "T"]```|

    The target model ("T") is created during initialization by deep-copying the G model.
    """

    def __init__(self, *args, inference_fn=None, **kwargs):
        """
        Arguments:
            inference_fn: Default is [adda_fn][pytorch_adapt.inference.adda_fn]
        """
        inference_fn = c_f.default(inference_fn, adda_fn)
        super().__init__(*args, inference_fn=inference_fn, **kwargs)

    def get_default_containers(self) -> MultipleContainers:
        """
        Returns:
            The default set of containers. This will use the
            [default optimizer][pytorch_adapt.adapters.utils.default_optimizer_tuple]
            for the T and D models.
        """
        optimizers = Optimizers(default_optimizer_tuple(), keys=["T", "D"])
        return MultipleContainers(optimizers=optimizers)

    def get_key_enforcer(self) -> KeyEnforcer:
        return KeyEnforcer(
            models=["G", "C", "D", "T"],
            optimizers=["D", "T"],
        )

    def init_hook(self, hook_kwargs):
        self.hook = self.hook_cls(
            d_opts=with_opt(["D"]), g_opts=with_opt(["T"]), **hook_kwargs
        )

    def init_containers_and_check_keys(self, containers):
        containers["models"]["T"] = copy.deepcopy(containers["models"]["G"])
        super().init_containers_and_check_keys(containers)

    @property
    def hook_cls(self):
        return ADDAHook
```
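A small sketch of the deep-copy behavior implemented in `init_containers_and_check_keys` above. It assumes the adapter exposes its models container via `adapter.models` with dict-style access, as the source suggests; the toy modules are placeholders.

```python
import torch

from pytorch_adapt.adapters import ADDA
from pytorch_adapt.containers import Models

G = torch.nn.Linear(1000, 100)
C = torch.nn.Linear(100, 10)
D = torch.nn.Linear(100, 1)

adapter = ADDA(models=Models({"G": G, "C": C, "D": D}))

# "T" is a separate module (so it can diverge from "G" during adaptation),
# but it starts with identical weights.
T = adapter.models["T"]
assert T is not G
for p_t, p_g in zip(T.parameters(), G.parameters()):
    assert torch.equal(p_t, p_g)
```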
`__init__(*args, inference_fn=None, **kwargs)`
Parameters:

|Name|Type|Description|Default|
|---|---|---|---|
|`inference_fn`| |Default is [adda_fn][pytorch_adapt.inference.adda_fn]|`None`|
Source code in pytorch_adapt\adapters\adda.py
```python
def __init__(self, *args, inference_fn=None, **kwargs):
    """
    Arguments:
        inference_fn: Default is [adda_fn][pytorch_adapt.inference.adda_fn]
    """
    inference_fn = c_f.default(inference_fn, adda_fn)
    super().__init__(*args, inference_fn=inference_fn, **kwargs)
```
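As a sketch of the default handling: omitting `inference_fn` (or passing `None`) resolves to `adda_fn` via `c_f.default`, so passing it explicitly, as below, should be equivalent. The toy models are placeholders.

```python
import torch

from pytorch_adapt.adapters import ADDA
from pytorch_adapt.containers import Models
from pytorch_adapt.inference import adda_fn

# Equivalent to omitting inference_fn, since adda_fn is the default.
adapter = ADDA(
    models=Models(
        {
            "G": torch.nn.Linear(1000, 100),
            "C": torch.nn.Linear(100, 10),
            "D": torch.nn.Linear(100, 1),
        }
    ),
    inference_fn=adda_fn,
)
```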
`get_default_containers()`
Returns:

The default set of containers. This will use the [default optimizer][pytorch_adapt.adapters.utils.default_optimizer_tuple] for the T and D models.
Source code in pytorch_adapt\adapters\adda.py
```python
def get_default_containers(self) -> MultipleContainers:
    """
    Returns:
        The default set of containers. This will use the
        [default optimizer][pytorch_adapt.adapters.utils.default_optimizer_tuple]
        for the T and D models.
    """
    optimizers = Optimizers(default_optimizer_tuple(), keys=["T", "D"])
    return MultipleContainers(optimizers=optimizers)
```
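A sketch of the fallback behavior: when no optimizers container is passed, the defaults above supply one optimizer each for "T" and "D". The toy models are placeholders, and the assumption that the adapter exposes an `optimizers` container with dict-style `keys()` is mine, not stated in the source.

```python
import torch

from pytorch_adapt.adapters import ADDA
from pytorch_adapt.containers import Models

adapter = ADDA(
    models=Models(
        {
            "G": torch.nn.Linear(1000, 100),
            "C": torch.nn.Linear(100, 10),
            "D": torch.nn.Linear(100, 1),
        }
    )
)

# No optimizers were passed, so the defaults from get_default_containers()
# are created for "T" and "D".
print(sorted(adapter.optimizers.keys()))  # expected: ['D', 'T']
```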