breaking: remove deprecated behavior (#1220)
jmoralez authored Dec 19, 2024
1 parent df8c431 commit 33502b0
Showing 99 changed files with 3,745 additions and 90,097 deletions.
16 changes: 5 additions & 11 deletions nbs/common.base_model.ipynb
@@ -360,16 +360,6 @@
 "        datamodule_constructor = _DistributedTimeSeriesDataModule\n",
 "\n",
 "        dataloader_kwargs = self.dataloader_kwargs if self.dataloader_kwargs is not None else {}\n",
-"\n",
-"        if self.num_workers_loader != 0:  # value is not at its default\n",
-"            warnings.warn(\n",
-"                \"The `num_workers_loader` argument is deprecated and will be removed in a future version. \"\n",
-"                \"Please provide num_workers through `dataloader_kwargs`, e.g. \"\n",
-"                f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n",
-"                category=FutureWarning,\n",
-"            )\n",
-"            dataloader_kwargs['num_workers'] = self.num_workers_loader\n",
-"\n",
 "        datamodule = datamodule_constructor(\n",
 "            dataset=dataset,\n",
 "            batch_size=batch_size,\n",
@@ -472,7 +462,11 @@
 "\n",
 "    @classmethod\n",
 "    def load(cls, path, **kwargs):\n",
-"        with fsspec.open(path, 'rb') as f:\n",
+"        if \"weights_only\" in inspect.signature(torch.load).parameters:\n",
+"            kwargs[\"weights_only\"] = False\n",
+"        with fsspec.open(path, 'rb') as f, warnings.catch_warnings():\n",
+"            # ignore possible warnings about weights_only=False\n",
+"            warnings.filterwarnings('ignore', category=FutureWarning)\n",
 "            content = torch.load(f, **kwargs)\n",
 "        with _disable_torch_init():\n",
 "            model = cls(**content['hyper_parameters'])\n",
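The `load` change tracks PyTorch's migration of `torch.load` toward `weights_only=True` by default: these checkpoints carry full hyperparameter objects rather than bare tensors, so the code opts out when the installed torch exposes the parameter and silences the resulting FutureWarning. A standalone sketch of the same guard, with `_load_full_checkpoint` as a hypothetical helper name:

    import inspect
    import warnings

    import torch

    def _load_full_checkpoint(path, **kwargs):
        # Only pass `weights_only` when this torch version supports it.
        if "weights_only" in inspect.signature(torch.load).parameters:
            kwargs["weights_only"] = False
        with open(path, "rb") as f, warnings.catch_warnings():
            # torch emits a FutureWarning for weights_only=False; ignore it.
            warnings.filterwarnings("ignore", category=FutureWarning)
            return torch.load(f, **kwargs)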
2 changes: 0 additions & 2 deletions nbs/common.base_multivariate.ipynb
@@ -101,7 +101,6 @@
 "        futr_exog_list=None,\n",
 "        hist_exog_list=None,\n",
 "        stat_exog_list=None,\n",
-"        num_workers_loader=0,\n",
 "        drop_last_loader=False,\n",
 "        random_seed=1,\n",
 "        alias=None,\n",
@@ -173,7 +172,6 @@
 "        self.decompose_forecast = False\n",
 "\n",
 "        # DataModule arguments\n",
-"        self.num_workers_loader = num_workers_loader\n",
 "        self.dataloader_kwargs = dataloader_kwargs\n",
 "        self.drop_last_loader = drop_last_loader\n",
 "        # used by on_validation_epoch_end hook\n",
3 changes: 0 additions & 3 deletions nbs/common.base_recurrent.ipynb
@@ -107,7 +107,6 @@
 "        futr_exog_list=None,\n",
 "        hist_exog_list=None,\n",
 "        stat_exog_list=None,\n",
-"        num_workers_loader=0,\n",
 "        drop_last_loader=False,\n",
 "        random_seed=1,\n",
 "        alias=None,\n",
@@ -172,7 +171,6 @@
 "        self.test_size = 0\n",
 "\n",
 "        # DataModule arguments\n",
-"        self.num_workers_loader = num_workers_loader\n",
 "        self.dataloader_kwargs = dataloader_kwargs\n",
 "        self.drop_last_loader = drop_last_loader\n",
 "        # used by on_validation_epoch_end hook\n",
@@ -553,7 +551,6 @@
 "        datamodule = TimeSeriesDataModule(\n",
 "            dataset=dataset,\n",
 "            valid_batch_size=self.valid_batch_size,\n",
-"            num_workers=self.num_workers_loader,\n",
 "            **data_module_kwargs\n",
 "        )\n",
 "        fcsts = trainer.predict(self, datamodule=datamodule)\n",
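With `num_workers` no longer injected in the recurrent predict path, worker settings reach both the training and prediction loaders only via `dataloader_kwargs` given at model construction. An end-to-end sketch under that assumption (LSTM and the bundled AirPassengersDF are illustrative choices):

    from neuralforecast import NeuralForecast
    from neuralforecast.models import LSTM
    from neuralforecast.utils import AirPassengersDF

    # `dataloader_kwargs` now governs the DataLoaders for fit and predict.
    model = LSTM(h=12, input_size=24, max_steps=10,
                 dataloader_kwargs={"num_workers": 2})
    nf = NeuralForecast(models=[model], freq="M")
    nf.fit(df=AirPassengersDF)
    preds = nf.predict()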
2 changes: 0 additions & 2 deletions nbs/common.base_windows.ipynb
@@ -111,7 +111,6 @@
 "        hist_exog_list=None,\n",
 "        stat_exog_list=None,\n",
 "        exclude_insample_y=False,\n",
-"        num_workers_loader=0,\n",
 "        drop_last_loader=False,\n",
 "        random_seed=1,\n",
 "        alias=None,\n",
@@ -188,7 +187,6 @@
 "        self.decompose_forecast = False\n",
 "\n",
 "        # DataModule arguments\n",
-"        self.num_workers_loader = num_workers_loader\n",
 "        self.dataloader_kwargs = dataloader_kwargs\n",
 "        self.drop_last_loader = drop_last_loader\n",
 "        # used by on_validation_epoch_end hook\n",