
Commit fd79c1c

vreis authored and facebook-github-bot committed
Remove local_variables from on_start (#413)
Summary:
Pull Request resolved: #413

This is part of a series of diffs to eliminate local_variables (see D20171981). Proceed with removing local_variables from on_start.

Reviewed By: mannatsingh

Differential Revision: D20178165

fbshipit-source-id: 069a05dd418668d3c5122e9e13177efec05b2125
1 parent 841b439 commit fd79c1c

16 files changed (+20, -33 lines)
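The change is mechanical across the hook classes: every on_start implementation drops its unused local_variables parameter. In signature form, before and after:

    # before (all ClassyHook subclasses):
    def on_start(self, task: "tasks.ClassyTask", local_variables: Dict[str, Any]) -> None: ...

    # after:
    def on_start(self, task: "tasks.ClassyTask") -> None: ...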

classy_vision/hooks/checkpoint_hook.py

Lines changed: 1 addition & 3 deletions

@@ -78,9 +78,7 @@ def _save_checkpoint(self, task, filename):
         if checkpoint_file:
             PathManager.copy(checkpoint_file, f"{self.checkpoint_folder}/{filename}")
 
-    def on_start(
-        self, task: "tasks.ClassyTask", local_variables: Dict[str, Any]
-    ) -> None:
+    def on_start(self, task: "tasks.ClassyTask") -> None:
         if not is_master() or getattr(task, "test_only", False):
             return
         if not PathManager.exists(self.checkpoint_folder):

classy_vision/hooks/classy_hook.py

Lines changed: 1 addition & 3 deletions

@@ -65,9 +65,7 @@ def name(cls) -> str:
         return cls.__name__
 
     @abstractmethod
-    def on_start(
-        self, task: "tasks.ClassyTask", local_variables: Dict[str, Any]
-    ) -> None:
+    def on_start(self, task: "tasks.ClassyTask") -> None:
         """Called at the start of training."""
         pass
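Since every concrete hook below gets the same mechanical edit, one sketch covers the lot. A hypothetical user-defined hook written against the new abstract signature (the no-op assignments mirror the pattern visible in the built-in hooks; the full set of hook events is abbreviated here):

    from classy_vision.hooks import ClassyHook


    class StartBannerHook(ClassyHook):
        """Hypothetical example hook: prints a banner when training starts."""

        # unused events are no-ops, following the pattern in the built-in hooks
        on_phase_start = ClassyHook._noop
        on_phase_end = ClassyHook._noop
        on_end = ClassyHook._noop

        def on_start(self, task) -> None:
            # whatever a hook needs now comes off the task object, not a shared dict
            print(f"Training starting; test_only={getattr(task, 'test_only', False)}")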

classy_vision/hooks/exponential_moving_average_model_hook.py

Lines changed: 1 addition & 1 deletion

@@ -78,7 +78,7 @@ def _save_current_model_state(self, model: nn.Module, model_state: Dict[str, Any
         for name, param in self.get_model_state_iterator(model):
             model_state[name] = param.detach().clone().to(device=self.device)
 
-    def on_start(self, task: ClassyTask, local_variables: Dict[str, Any]) -> None:
+    def on_start(self, task: ClassyTask) -> None:
         if self.state.model_state:
             # loaded state from checkpoint, do not re-initialize, only move the state
             # to the right device

classy_vision/hooks/model_complexity_hook.py

Lines changed: 1 addition & 3 deletions

@@ -28,9 +28,7 @@ class ModelComplexityHook(ClassyHook):
     on_phase_end = ClassyHook._noop
     on_end = ClassyHook._noop
 
-    def on_start(
-        self, task: "tasks.ClassyTask", local_variables: Dict[str, Any]
-    ) -> None:
+    def on_start(self, task: "tasks.ClassyTask") -> None:
         """Measure number of parameters, FLOPs and activations."""
         self.num_flops = 0
         self.num_activations = 0

classy_vision/hooks/model_tensorboard_hook.py

Lines changed: 1 addition & 3 deletions

@@ -51,9 +51,7 @@ def __init__(self, tb_writer) -> None:
 
         self.tb_writer = tb_writer
 
-    def on_start(
-        self, task: "tasks.ClassyTask", local_variables: Dict[str, Any]
-    ) -> None:
+    def on_start(self, task: "tasks.ClassyTask") -> None:
         """
         Plot the model on Tensorboard.
         """

classy_vision/hooks/profiler_hook.py

Lines changed: 1 addition & 3 deletions

@@ -25,9 +25,7 @@ class ProfilerHook(ClassyHook):
     on_phase_end = ClassyHook._noop
     on_end = ClassyHook._noop
 
-    def on_start(
-        self, task: "tasks.ClassyTask", local_variables: Dict[str, Any]
-    ) -> None:
+    def on_start(self, task: "tasks.ClassyTask") -> None:
         """Profile the forward pass."""
         logging.info("Profiling forward pass...")
         batchsize_per_replica = getattr(

classy_vision/tasks/classification_task.py

Lines changed: 3 additions & 2 deletions

@@ -854,8 +854,9 @@ def get_global_batchsize(self):
         """
         return self.dataloaders[self.phase_type].dataset.get_global_batchsize()
 
-    def on_start(self, local_variables):
-        self.run_hooks(local_variables, ClassyHookFunctions.on_start.name)
+    def on_start(self):
+        for hook in self.hooks:
+            hook.on_start(self)
 
     def on_phase_start(self, local_variables):
         self.phase_start_time_total = time.perf_counter()
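This is the one hunk that is more than a signature change: on_start can no longer go through the generic run_hooks dispatcher, because that helper forwards local_variables to whichever hook event it invokes, so the task now calls each hook directly. A standalone sketch of the new dispatch shape (the classes here are stand-ins, not the real ClassyVision types):

    class FakeHook:  # stand-in for a ClassyHook with the new signature
        def on_start(self, task):
            print("hook saw task:", task)


    class FakeTask:  # stand-in for ClassificationTask
        def __init__(self, hooks):
            self.hooks = hooks

        def on_start(self):
            # direct dispatch, as in this commit -- no shared local_variables dict
            for hook in self.hooks:
                hook.on_start(self)


    FakeTask([FakeHook(), FakeHook()]).on_start()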

classy_vision/tasks/classy_task.py

Lines changed: 1 addition & 1 deletion

@@ -119,7 +119,7 @@ def train_step(self, use_gpu) -> None:
         pass
 
     @abstractmethod
-    def on_start(self, local_variables):
+    def on_start(self):
         """
         Start training.

classy_vision/trainer/classy_trainer.py

Lines changed: 1 addition & 1 deletion

@@ -72,7 +72,7 @@ def train(self, task: ClassyTask):
 
         local_variables = {}
 
-        task.on_start(local_variables)
+        task.on_start()
         while not task.done_training():
            task.on_phase_start(local_variables)
            while True:
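Note that the trainer still builds local_variables and hands it to the remaining hook events (task.on_phase_start and the calls inside the loop); those call sites are slated for the follow-up diffs in this series (see D20171981).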

classy_vision/trainer/elastic_trainer.py

Lines changed: 1 addition & 1 deletion

@@ -78,7 +78,7 @@ def elastic_train_step(orig_state):
            if state.run_start_hooks:
                # need this to ensure we don't run the on_start hooks every time
                # a trainer starts
-               state.task.on_start(local_variables)
+               state.task.on_start()
                state.run_start_hooks = False
            return state, self._ClassyWorkerStats(None)
8484
