
Fix type hints of tuner/batch_size_scaling.py #13518

Merged
Commits
30 commits
b6cda17
Fix type hints
ar90n Jul 3, 2022
3f1d70c
fix
ar90n Jul 3, 2022
ab9bcba
Use Any to use data.py
ar90n Jul 3, 2022
8361712
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jul 3, 2022
fb088c3
Remove unused import
ar90n Jul 3, 2022
b5270c7
Fix missing import
ar90n Jul 3, 2022
61e3c8f
Update src/pytorch_lightning/tuner/batch_size_scaling.py
ar90n Jul 4, 2022
4d952ec
Remove unneccesary fix
ar90n Jul 5, 2022
5419848
Use assert to narrow types
ar90n Jul 5, 2022
1a1e265
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jul 5, 2022
ca45a66
Add None check explicitly
ar90n Jul 5, 2022
d23e262
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jul 5, 2022
122c930
Simplify type narrowing
ar90n Jul 17, 2022
c1f3842
Merge remote-tracking branch 'origin/master' into fix-type-hints-tune…
ar90n Jul 17, 2022
3344c48
Fix wrong code merging
ar90n Jul 17, 2022
5c4b360
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jul 17, 2022
ba2ec9a
Merge branch 'master' into fix-type-hints-tuner-batch_size_scaling.py
Borda Jul 17, 2022
0897b6c
Merge branch 'master' into fix-type-hints-tuner-batch_size_scaling.py
otaj Jul 22, 2022
a93c45e
merge master
Sep 13, 2022
27e9cda
Merge branch 'master' into fix-type-hints-tuner-batch_size_scaling.py
Borda Sep 14, 2022
e8754de
,
Borda Sep 14, 2022
319c4ff
..
Borda Sep 14, 2022
c54c13e
Merge branch 'master' into fix-type-hints-tuner-batch_size_scaling.py
Borda Sep 22, 2022
1ced0a8
Merge remote-tracking branch 'origin/master' into fix-type-hints-tune…
ar90n Sep 28, 2022
28ac120
Add type hints
ar90n Sep 28, 2022
ffeb484
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 28, 2022
d7f7388
Add property type annotation
ar90n Sep 28, 2022
c6c793b
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Sep 29, 2022
ad06e18
Merge branch 'master' into fix-type-hints-tuner-batch_size_scaling.py
otaj Sep 29, 2022
14336bc
Merge branch 'master' into fix-type-hints-tuner-batch_size_scaling.py
otaj Sep 29, 2022
1 change: 0 additions & 1 deletion pyproject.toml
@@ -59,7 +59,6 @@ module = [
"pytorch_lightning.callbacks.progress.rich_progress",
"pytorch_lightning.trainer.trainer",
"pytorch_lightning.trainer.connectors.checkpoint_connector",
"pytorch_lightning.tuner.batch_size_scaling",
"lightning_app.api.http_methods",
"lightning_app.api.request_types",
"lightning_app.cli.app-template.app",
2 changes: 2 additions & 0 deletions src/pytorch_lightning/callbacks/batch_size_finder.py
@@ -31,6 +31,8 @@
class BatchSizeFinder(Callback):
SUPPORTED_MODES = ("power", "binsearch")

optimal_batch_size: Optional[int]
Contributor

Why is this annotated in the class definition instead of in __init__?

Contributor Author

@carmocca
This was my mistake, and my fix did not make it in time.
In the head of my fork branch, this annotation has already been moved into __init__.
May I create a new PR to move the annotation into __init__?

ar90n@7076c1b

Contributor

Yes, please. Thank you 💜


def __init__(
self,
mode: str = "power",
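As a minimal sketch of the two annotation placements discussed in the thread above (FinderA and FinderB are hypothetical stand-ins, not the actual BatchSizeFinder):

from typing import Optional

# Bare annotation in the class body, as in the diff above: it declares the
# attribute for type checkers but does not create a class attribute at runtime.
class FinderA:
    optimal_batch_size: Optional[int]

    def __init__(self, mode: str = "power") -> None:
        self.mode = mode
        self.optimal_batch_size = None


# Placement the follow-up commit (ar90n@7076c1b) moves to: annotate the attribute
# where it is first assigned in __init__, keeping declaration and initial value together.
class FinderB:
    def __init__(self, mode: str = "power") -> None:
        self.mode = mode
        self.optimal_batch_size: Optional[int] = None

Both variants type-check the same way for instance attribute access; the difference is purely where the declaration lives.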
20 changes: 15 additions & 5 deletions src/pytorch_lightning/tuner/batch_size_scaling.py
@@ -35,10 +35,10 @@ def scale_batch_size(
init_val: int = 2,
max_trials: int = 25,
batch_arg_name: str = "batch_size",
):
) -> Optional[int]:
if trainer.fast_dev_run:
rank_zero_warn("Skipping batch size scaler since `fast_dev_run` is enabled.")
return
return None

# Save initial model, that is loaded after batch size is found
ckpt_path = os.path.join(trainer.default_root_dir, f".scale_batch_size_{uuid.uuid4()}.ckpt")
@@ -141,7 +141,12 @@ def __scale_batch_restore_params(trainer: "pl.Trainer", params: Dict[str, Any])


def _run_power_scaling(
trainer: "pl.Trainer", pl_module: "pl.LightningModule", new_size: int, batch_arg_name: str, max_trials: int, params
trainer: "pl.Trainer",
pl_module: "pl.LightningModule",
new_size: int,
batch_arg_name: str,
max_trials: int,
params: Dict[str, Any],
) -> int:
"""Batch scaling mode where the size is doubled at each iteration until an OOM error is encountered."""
# this flag is used to determine whether the previously scaled batch size, right before OOM, was a success or not
@@ -179,7 +184,12 @@ def _run_power_scaling(


def _run_binary_scaling(
trainer: "pl.Trainer", pl_module: "pl.LightningModule", new_size: int, batch_arg_name: str, max_trials: int, params
trainer: "pl.Trainer",
pl_module: "pl.LightningModule",
new_size: int,
batch_arg_name: str,
max_trials: int,
params: Dict[str, Any],
) -> int:
"""Batch scaling mode where the size is initially is doubled at each iteration until an OOM error is
encountered.
@@ -309,7 +319,7 @@ def _reset_dataloaders(trainer: "pl.Trainer", pl_module: "pl.LightningModule") -
reset_fn(pl_module)


def _try_loop_run(trainer: "pl.Trainer", params) -> None:
def _try_loop_run(trainer: "pl.Trainer", params: Dict[str, Any]) -> None:
if trainer.state.fn == "fit":
loop = trainer.fit_loop
else:
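For context on the Optional[int] return annotation above: scale_batch_size now returns None when fast_dev_run is enabled, so callers have to narrow the result before treating it as an int. A minimal sketch of such a caller follows (use_scaled_batch_size and fallback are hypothetical names, not part of this PR):

from typing import Optional

# Hypothetical caller of scale_batch_size: the result may be None, so narrow it
# with an explicit None check before using it as an int, mirroring the
# "Add None check explicitly" / "Simplify type narrowing" commits in this PR.
def use_scaled_batch_size(new_size: Optional[int], fallback: int = 2) -> int:
    if new_size is None:
        return fallback
    return new_size  # a type checker narrows this to int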