
Commit 9186ff1

sdesrozis authored and Desroziers committed
[skip ci] Add doctest for LR Schedulers (#2384)

* [skip ci] doctest for CosineAnnealingScheduler
* doctest for ConcatScheduler

Co-authored-by: Desroziers <[email protected]>
1 parent c19480c commit 9186ff1

File tree

1 file changed: +85 -37 lines changed


ignite/handlers/param_scheduler.py

Lines changed: 85 additions & 37 deletions
@@ -365,8 +365,6 @@ class LinearCyclicalScheduler(CyclicalScheduler):

         .. testcode:: 1

-            from ignite.handlers.param_scheduler import LinearCyclicalScheduler
-
             # Linearly increases the learning rate from 0.0 to 1.0 and back to 0.0
             # over a cycle of 4 iterations
             scheduler = LinearCyclicalScheduler(default_optimizer, "lr", 0.0, 1.0, 4)
@@ -389,8 +387,6 @@ def print_lr():

         .. testcode:: 2

-            from ignite.handlers.param_scheduler import LinearCyclicalScheduler
-
             optimizer = torch.optim.SGD(
                 [
                     {"params": default_model.base.parameters(), "lr": 0.001},
@@ -400,19 +396,19 @@ def print_lr():

             # Linearly increases the learning rate from 0.0 to 1.0 and back to 0.0
             # over a cycle of 4 iterations
-            scheduler1 = LinearCyclicalScheduler(optimizer, "lr", 0.0, 1.0, 4, param_group_index=0)
+            scheduler1 = LinearCyclicalScheduler(optimizer, "lr (base)", 0.0, 1.0, 4, param_group_index=0)

-            # Linearly increases the learning rate from 1.0 to 0.0 and back to 0.1
+            # Linearly increases the learning rate from 0.0 to 0.1 and back to 0.0
             # over a cycle of 4 iterations
-            scheduler2 = LinearCyclicalScheduler(optimizer, "lr", 0.0, 0.1, 4, param_group_index=1)
+            scheduler2 = LinearCyclicalScheduler(optimizer, "lr (fc)", 0.0, 0.1, 4, param_group_index=1)

             default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler1)
             default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler2)

             @default_trainer.on(Events.ITERATION_COMPLETED)
             def print_lr():
-                print(optimizer.param_groups[0]["lr"],
-                      optimizer.param_groups[1]["lr"])
+                print(optimizer.param_groups[0]["lr (base)"],
+                      optimizer.param_groups[1]["lr (fc)"])

             default_trainer.run([0] * 9, max_epochs=1)
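Note: the triangular values these two doctests print are easy to reproduce outside the Engine loop. Below is a minimal sketch, assuming the scheduler ramps start -> end -> start linearly over one cycle (inferred from the comments above, not quoted from the library source); `expected_lr` is a hypothetical helper, not part of ignite.

    # Minimal sketch of the triangular schedule the doctests exercise.
    # Assumption: the value rises linearly from start to end over the first
    # half of the cycle and falls back over the second half.
    def expected_lr(start, end, cycle_size, event_index):
        progress = (event_index % cycle_size) / cycle_size  # position in [0, 1)
        triangle = 1.0 - abs(2.0 * progress - 1.0)          # 0 at cycle edges, 1 at mid-cycle
        return start + (end - start) * triangle

    # Group 0 sweeps 0.0 -> 1.0 -> 0.0; group 1 sweeps 0.0 -> 0.1 -> 0.0.
    for k in range(9):
        print(expected_lr(0.0, 1.0, 4, k), expected_lr(0.0, 0.1, 4, k))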
@@ -460,33 +456,67 @@ class CosineAnnealingScheduler(CyclicalScheduler):

         usually be the number of batches in an epoch.

     Examples:
-        .. code-block:: python
-
-            from ignite.handlers.param_scheduler import CosineAnnealingScheduler
-
-            scheduler = CosineAnnealingScheduler(optimizer, 'lr', 1e-1, 1e-3, len(train_loader))
-            trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
-            #
-            # Anneals the learning rate from 1e-1 to 1e-3 over the course of 1 epoch.
-            #
+
+        .. testsetup:: *
+
+            default_trainer = get_default_trainer()

-        .. code-block:: python
+        .. testcode:: 1
+
+            # CosineAnnealing increases the learning rate from 0.0 to 1.0
+            # over a cycle of 4 iterations
+            scheduler = CosineAnnealingScheduler(default_optimizer, "lr", 0.0, 1.0, 4)
+
+            default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler)
+
+            @default_trainer.on(Events.ITERATION_COMPLETED)
+            def print_lr():
+                print(default_optimizer.param_groups[0]["lr"])
+
+            default_trainer.run([0] * 9, max_epochs=1)

-            from ignite.handlers.param_scheduler import CosineAnnealingScheduler
-            from ignite.handlers.param_scheduler import LinearCyclicalScheduler
+        .. testoutput:: 1

-            optimizer = SGD(
+            0.0
+            0.1464...
+            0.4999...
+            0.8535...
+            ...
+
+        .. testcode:: 2
+
+            optimizer = torch.optim.SGD(
                 [
-                    {"params": model.base.parameters(), 'lr': 0.001},
-                    {"params": model.fc.parameters(), 'lr': 0.01},
+                    {"params": default_model.base.parameters(), "lr": 0.001},
+                    {"params": default_model.fc.parameters(), "lr": 0.01},
                 ]
             )

-            scheduler1 = LinearCyclicalScheduler(optimizer, 'lr', 1e-7, 1e-5, len(train_loader), param_group_index=0)
-            trainer.add_event_handler(Events.ITERATION_STARTED, scheduler1, "lr (base)")
+            # CosineAnnealing increases the learning rate from 0.0 to 1.0
+            # over a cycle of 4 iterations
+            scheduler1 = CosineAnnealingScheduler(optimizer, "lr (base)", 0.0, 1.0, 4, param_group_index=0)

-            scheduler2 = CosineAnnealingScheduler(optimizer, 'lr', 1e-5, 1e-3, len(train_loader), param_group_index=1)
-            trainer.add_event_handler(Events.ITERATION_STARTED, scheduler2, "lr (fc)")
+            # CosineAnnealing increases the learning rate from 0.0 to 0.1
+            # over a cycle of 4 iterations
+            scheduler2 = CosineAnnealingScheduler(optimizer, "lr (fc)", 0.0, 0.1, 4, param_group_index=1)
+
+            default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler1)
+            default_trainer.add_event_handler(Events.ITERATION_STARTED, scheduler2)
+
+            @default_trainer.on(Events.ITERATION_COMPLETED)
+            def print_lr():
+                print(optimizer.param_groups[0]["lr (base)"],
+                      optimizer.param_groups[1]["lr (fc)"])
+
+            default_trainer.run([0] * 9, max_epochs=1)
+
+        .. testoutput:: 2
+
+            0.0 0.0
+            0.1464... 0.01464...
+            0.4999... 0.04999...
+            0.8535... 0.08535...
+            ...

     .. [Smith17] Smith, Leslie N. "Cyclical learning rates for training neural networks."
         Applications of Computer Vision (WACV), 2017 IEEE Winter Conference on. IEEE, 2017
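Note: the 0.1464.../0.4999.../0.8535... values in the expected outputs follow from a cosine half-wave. A quick sanity check, assuming the form value(k) = start + (end - start)/2 * (1 - cos(pi * k / cycle_size)) that the printed numbers imply (an assumption inferred from the testoutput, not quoted from the library source); `cosine_value` is a hypothetical helper.

    import math

    # Hypothetical helper reproducing the testoutput values above.
    def cosine_value(start, end, cycle_size, event_index):
        progress = (event_index % cycle_size) / cycle_size
        return start + ((end - start) / 2) * (1 - math.cos(math.pi * progress))

    # Group "lr (base)" sweeps 0.0 -> 1.0; group "lr (fc)" sweeps 0.0 -> 0.1.
    for k in range(4):
        print(cosine_value(0.0, 1.0, 4, k), cosine_value(0.0, 0.1, 4, k))
    # Prints 0.0 0.0, then ~0.1464 ~0.01464, ~0.4999 ~0.04999, ~0.8535 ~0.08535.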
@@ -513,21 +543,39 @@ class ConcatScheduler(ParamScheduler):

         `engine.state.param_history`, (default=False).

     Examples:
-        .. code-block:: python
-
-            from ignite.handlers.param_scheduler import ConcatScheduler
-            from ignite.handlers.param_scheduler import LinearCyclicalScheduler
-            from ignite.handlers.param_scheduler import CosineAnnealingScheduler
+
+        .. testsetup::

-            scheduler_1 = LinearCyclicalScheduler(optimizer, "lr", start_value=0.1, end_value=0.5, cycle_size=60)
-            scheduler_2 = CosineAnnealingScheduler(optimizer, "lr", start_value=0.5, end_value=0.01, cycle_size=60)
+            default_trainer = get_default_trainer()
+
+        .. testcode::

-            combined_scheduler = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[30, ])
-            trainer.add_event_handler(Events.ITERATION_STARTED, combined_scheduler)
-            #
-            # Sets the Learning rate linearly from 0.1 to 0.5 over 30 iterations. Then
-            # starts an annealing schedule from 0.5 to 0.01 over 60 iterations.
+            scheduler_1 = LinearCyclicalScheduler(default_optimizer, "lr", 0.0, 1.0, 8)
+            scheduler_2 = CosineAnnealingScheduler(default_optimizer, "lr", 1.0, 0.2, 4)
+
+            # Sets the Learning rate linearly from 0.0 to 1.0 over 4 iterations. Then
+            # starts an annealing schedule from 1.0 to 0.2 over the next 4 iterations.
             # The annealing cycles are repeated indefinitely.
+            combined_scheduler = ConcatScheduler(schedulers=[scheduler_1, scheduler_2], durations=[4, ])
+
+            default_trainer.add_event_handler(Events.ITERATION_STARTED, combined_scheduler)
+
+            @default_trainer.on(Events.ITERATION_COMPLETED)
+            def print_lr():
+                print(default_optimizer.param_groups[0]["lr"])
+
+            default_trainer.run([0] * 8, max_epochs=1)
+
+        .. testoutput::
+
+            0.0
+            0.25
+            0.5
+            0.75
+            1.0
+            0.8828...
+            0.6000...
+            0.3171...

     .. versionadded:: 0.4.5
     """
