Skip to content

Commit

Permalink
Maintenance: python and pytorch versions updated (#733)
Browse files Browse the repository at this point in the history
* Update and document supported PyTorch versions
- add 1.4.0 since we want to support 4 versions
- use 1.7.1 instead of 1.7.0
- update documentation

* Remove warning about Python 3.5 support being dropped

* Remove warning about CyclicLR step on epoch
  • Loading branch information
BenjaminBossan authored Dec 23, 2020
1 parent 1c8fc61 commit ddf6247
Show file tree
Hide file tree
Showing 6 changed files with 3 additions and 39 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
strategy:
matrix:
python_version: ['3.6', '3.7', '3.8']
torch_version: ['1.5.1+cpu', '1.6.0+cpu', '1.7.0+cpu']
torch_version: ['1.4.0+cpu', '1.5.1+cpu', '1.6.0+cpu', '1.7.1+cpu']
os: [ubuntu-latest]

steps:
Expand Down
2 changes: 1 addition & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -234,10 +234,10 @@ instructions for PyTorch, visit the `PyTorch website
<http://pytorch.org/>`__. skorch officially supports the last four
minor PyTorch versions, which currently are:

- 1.3.1
- 1.4.0
- 1.5.1
- 1.6.0
- 1.7.1

However, that doesn't mean that older versions don't work, just that
they aren't tested. Since skorch mostly relies on the stable part of
Expand Down
2 changes: 1 addition & 1 deletion docs/user/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,10 @@ instructions for PyTorch, visit the `PyTorch website
<http://pytorch.org/>`__. skorch officially supports the last four
minor PyTorch versions, which currently are:

- 1.3.1
- 1.4.0
- 1.5.1
- 1.6.0
- 1.7.1

However, that doesn't mean that older versions don't work, just that
they aren't tested. Since skorch mostly relies on the stable part of
Expand Down
8 changes: 0 additions & 8 deletions skorch/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,6 @@
MIN_TORCH_VERSION = '1.1.0'


# TODO: remove in skorch 0.10.0
if sys.version_info < (3, 6):
warnings.warn(
"Official support for Python 3.5 will be dropped starting from "
"skorch version 0.10.0",
FutureWarning,
)

try:
# pylint: disable=wrong-import-position
import torch
Expand Down
10 changes: 0 additions & 10 deletions skorch/callbacks/lr_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,16 +115,6 @@ def initialize(self):
self.policy_ = self._get_policy_cls()
self.lr_scheduler_ = None
self.batch_idx_ = 0
# TODO: Remove this warning on 0.10 release
if (self.policy_ == TorchCyclicLR or self.policy_ == "TorchCyclicLR"
and self.step_every == 'epoch'):
warnings.warn(
"The LRScheduler now makes a step every epoch by default. "
"To have the cyclic lr scheduler update "
"every batch set step_every='batch'",
FutureWarning
)

return self

def _get_policy_cls(self):
Expand Down
18 changes: 0 additions & 18 deletions skorch/tests/callbacks/test_lr_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,26 +241,8 @@ def test_lr_scheduler_record_batch_step(self, classifier_module, classifier_data
)
assert np.all(net.history[-1, 'batches', :, 'event_lr'] == new_lrs)

def test_cyclic_lr_with_epoch_step_warning(self,
classifier_module,
classifier_data):
msg = ("The LRScheduler now makes a step every epoch by default. "
"To have the cyclic lr scheduler update "
"every batch set step_every='batch'")
with pytest.warns(FutureWarning, match=msg) as record:
scheduler = LRScheduler(
TorchCyclicLR, base_lr=123, max_lr=999)
net = NeuralNetClassifier(
classifier_module,
max_epochs=0,
callbacks=[('scheduler', scheduler)],
)
net.initialize()
assert len(record) == 1


class TestReduceLROnPlateau:

def get_net_with_mock(
self, classifier_data, classifier_module, monitor='train_loss'):
"""Returns a net with a mocked lr policy that allows to check what
Expand Down

0 comments on commit ddf6247

Please sign in to comment.