[FEAT] TiDE model #971

Merged · 10 commits · May 6, 2024
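For context, once merged TiDE is exposed through the standard `NeuralForecast` workflow. A minimal sketch of that usage, reusing the import path and the constructor arguments that appear in the test-script diff below; the toy DataFrame (its values and month-end frequency) is hypothetical:

```python
import numpy as np
import pandas as pd

from neuralforecast.core import NeuralForecast
from neuralforecast.models.tide import TiDE
from neuralforecast.losses.pytorch import MAE

# Hypothetical toy data in neuralforecast's long format (unique_id, ds, y).
horizon = 12
train = pd.DataFrame({
    'unique_id': 'series_1',
    'ds': pd.date_range('2018-01-31', periods=48, freq='M'),
    'y': np.arange(48, dtype=np.float32),
})

# Same constructor arguments as the TiDE entry added to the test script below.
model = TiDE(h=horizon, input_size=2 * horizon, loss=MAE(),
             max_steps=1000, val_check_steps=500)

nf = NeuralForecast(models=[model], freq='M')
nf.fit(train)
forecasts = nf.predict()  # adds a 'TiDE' column with the point forecasts
```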
23 changes: 12 additions & 11 deletions action_files/test_models/src/evaluation.py
@@ -42,17 +42,18 @@ def evaluate(model: str, dataset: str, group: str):
 if __name__ == '__main__':
     groups = ['Monthly']
     models = ['AutoDilatedRNN', 'RNN', 'TCN', 'DeepAR',
-              'NHITS', 'TFT', 'AutoMLP', 'DLinear', 'VanillaTransformer']
+              'NHITS', 'TFT', 'AutoMLP', 'DLinear', 'VanillaTransformer',
+              'BiTCN', 'TiDE']
     datasets = ['M3']
     evaluation = [evaluate(model, dataset, group) for model, group in product(models, groups) for dataset in datasets]
     evaluation = [eval_ for eval_ in evaluation if eval_ is not None]
-    evaluation = pd.concat(evaluation)
-    evaluation = evaluation[['dataset', 'model', 'time', 'mae', 'smape']]
-    evaluation['time'] /= 60 #minutes
-    evaluation = evaluation.set_index(['dataset', 'model']).stack().reset_index()
-    evaluation.columns = ['dataset', 'model', 'metric', 'val']
-    evaluation = evaluation.set_index(['dataset', 'metric', 'model']).unstack().round(3)
-    evaluation = evaluation.droplevel(0, 1).reset_index()
-    evaluation['AutoARIMA'] = [666.82, 15.35, 3.000]
-    evaluation.to_csv('data/evaluation.csv')
-    print(evaluation.T)
+    df_evaluation = pd.concat(evaluation)
+    df_evaluation = df_evaluation.loc[:, ['dataset', 'model', 'time', 'mae', 'smape']]
+    df_evaluation['time'] /= 60 #minutes
+    df_evaluation = df_evaluation.set_index(['dataset', 'model']).stack().reset_index()
+    df_evaluation.columns = ['dataset', 'model', 'metric', 'val']
+    df_evaluation = df_evaluation.set_index(['dataset', 'metric', 'model']).unstack().round(3)
+    df_evaluation = df_evaluation.droplevel(0, 1).reset_index()
+    df_evaluation['AutoARIMA'] = [666.82, 15.35, 3.000]
+    df_evaluation.to_csv('data/evaluation.csv')
+    print(df_evaluation.T)
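The renamed `df_evaluation` block pivots the long per-model results into a wide table: one row per (dataset, metric) pair and one column per model. A small self-contained sketch of the same stack/unstack reshaping on made-up numbers (the values are illustrative only, not real benchmark results):

```python
import pandas as pd

# Illustrative values only; the real numbers come from evaluate().
df = pd.DataFrame({
    'dataset': ['M3', 'M3'],
    'model': ['TiDE', 'BiTCN'],
    'time': [1.2, 1.5],        # minutes
    'mae': [700.0, 710.0],
    'smape': [13.2, 13.5],
})

df = df.set_index(['dataset', 'model']).stack().reset_index()
df.columns = ['dataset', 'model', 'metric', 'val']
df = df.set_index(['dataset', 'metric', 'model']).unstack().round(3)
df = df.droplevel(0, 1).reset_index()
print(df)  # one row per (dataset, metric), one column per model
```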
65 changes: 27 additions & 38 deletions action_files/test_models/src/models.py
@@ -2,33 +2,39 @@
 import time
 
 import fire
-import numpy as np
+# import numpy as np
 import pandas as pd
-import pytorch_lightning as pl
-import torch
+# import pytorch_lightning as pl
+# import torch
 
-import neuralforecast
+# import neuralforecast
 from neuralforecast.core import NeuralForecast
 
-from neuralforecast.models.gru import GRU
+# from neuralforecast.models.gru import GRU
 from neuralforecast.models.rnn import RNN
 from neuralforecast.models.tcn import TCN
-from neuralforecast.models.lstm import LSTM
-from neuralforecast.models.dilated_rnn import DilatedRNN
+# from neuralforecast.models.lstm import LSTM
+# from neuralforecast.models.dilated_rnn import DilatedRNN
 from neuralforecast.models.deepar import DeepAR
-from neuralforecast.models.mlp import MLP
+# from neuralforecast.models.mlp import MLP
 from neuralforecast.models.nhits import NHITS
-from neuralforecast.models.nbeats import NBEATS
-from neuralforecast.models.nbeatsx import NBEATSx
+# from neuralforecast.models.nbeats import NBEATS
+# from neuralforecast.models.nbeatsx import NBEATSx
 from neuralforecast.models.tft import TFT
 from neuralforecast.models.vanillatransformer import VanillaTransformer
-from neuralforecast.models.informer import Informer
-from neuralforecast.models.autoformer import Autoformer
-from neuralforecast.models.patchtst import PatchTST
+# from neuralforecast.models.informer import Informer
+# from neuralforecast.models.autoformer import Autoformer
+# from neuralforecast.models.patchtst import PatchTST
 from neuralforecast.models.dlinear import DLinear
+from neuralforecast.models.bitcn import BiTCN
+from neuralforecast.models.tide import TiDE
 
 from neuralforecast.auto import (
-    AutoMLP, AutoNHITS, AutoNBEATS, AutoDilatedRNN, AutoTFT
+    AutoMLP,
+    # AutoNHITS,
+    # AutoNBEATS,
+    AutoDilatedRNN,
+    # AutoTFT
 )
 
 from neuralforecast.losses.pytorch import SMAPE, MAE
@@ -43,13 +49,6 @@ def main(dataset: str = 'M3', group: str = 'Monthly') -> None:
     train, horizon, freq, seasonality = get_data('data/', dataset, group)
     train['ds'] = pd.to_datetime(train['ds'])
 
-    config_nbeats = {
-        "input_size": tune.choice([2 * horizon]),
-        "max_steps": 1000,
-        "val_check_steps": 300,
-        "scaler_type": "minmax1",
-        "random_seed": tune.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
-    }
     config = {
         "hidden_size": tune.choice([256, 512]),
         "num_layers": tune.choice([2, 4]),
@@ -64,6 +63,7 @@ def main(dataset: str = 'M3', group: str = 'Monthly') -> None:
         "max_steps": 300,
         "val_check_steps": 100,
         "random_seed": tune.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),}
+
     models = [
         AutoDilatedRNN(h=horizon, loss=MAE(), config=config_drnn, num_samples=2, cpus=1),
         RNN(h=horizon, input_size=2 * horizon, encoder_hidden_size=50, max_steps=300),
@@ -74,10 +74,12 @@ def main(dataset: str = 'M3', group: str = 'Monthly') -> None:
         TFT(h=horizon, input_size=2 * horizon, loss=SMAPE(), hidden_size=64, scaler_type='robust', windows_batch_size=512, max_steps=1500, val_check_steps=500),
         VanillaTransformer(h=horizon, input_size=2 * horizon, loss=MAE(), hidden_size=64, scaler_type='minmax1', windows_batch_size=512, max_steps=1500, val_check_steps=500),
         DeepAR(h=horizon, input_size=2 * horizon, scaler_type='minmax1', max_steps=1000),
+        BiTCN(h=horizon, input_size=2 * horizon, loss=MAE(), dropout=0.0, max_steps=1000, val_check_steps=500),
+        TiDE(h=horizon, input_size=2 * horizon, loss=MAE(), max_steps=1000, val_check_steps=500),
     ]
 
     # Models
-    for model in models[:-1]:
+    for model in models:
         model_name = type(model).__name__
         print(50*'-', model_name, 50*'-')
         start = time.time()
@@ -87,26 +89,13 @@ def main(dataset: str = 'M3', group: str = 'Monthly') -> None:
         end = time.time()
         print(end - start)
 
+        if model_name == 'DeepAR':
+            forecasts = forecasts[['unique_id', 'ds', 'DeepAR-median']]
+
         forecasts.columns = ['unique_id', 'ds', model_name]
         forecasts.to_csv(f'data/{model_name}-forecasts-{dataset}-{group}.csv', index=False)
         time_df = pd.DataFrame({'time': [end - start], 'model': [model_name]})
         time_df.to_csv(f'data/{model_name}-time-{dataset}-{group}.csv', index=False)
 
-    # DeepAR
-    model_name = type(models[-1]).__name__
-    start = time.time()
-    fcst = NeuralForecast(models=[models[-1]], freq=freq)
-    fcst.fit(train)
-    forecasts = fcst.predict()
-    end = time.time()
-    print(end - start)
-
-    forecasts = forecasts[['unique_id', 'ds', 'DeepAR-median']]
-    forecasts.columns = ['unique_id', 'ds', 'DeepAR']
-    forecasts.to_csv(f'data/{model_name}-forecasts-{dataset}-{group}.csv', index=False)
-    time_df = pd.DataFrame({'time': [end - start], 'model': [model_name]})
-    time_df.to_csv(f'data/{model_name}-time-{dataset}-{group}.csv', index=False)
-
 
 if __name__ == '__main__':
     fire.Fire(main)
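The separate DeepAR branch is now folded into the main loop: DeepAR's probabilistic output carries a `DeepAR-median` column (plus further columns that depend on the configured loss), so the loop keeps the median and renames it to the bare model name, matching the single-column output of the point-forecast models. A tiny sketch of that renaming step; the forecast values and the extra quantile column names are made up for illustration:

```python
import pandas as pd

# Illustrative DeepAR output; the real column set depends on the loss used.
forecasts = pd.DataFrame({
    'unique_id': ['series_1', 'series_1'],
    'ds': pd.to_datetime(['2020-01-31', '2020-02-29']),
    'DeepAR-median': [101.3, 99.8],
    'DeepAR-lo-90': [90.1, 88.4],   # hypothetical quantile columns
    'DeepAR-hi-90': [112.8, 110.9],
})

model_name = 'DeepAR'
if model_name == 'DeepAR':
    forecasts = forecasts[['unique_id', 'ds', 'DeepAR-median']]

# Rename the remaining forecast column to the bare model name, as in the loop above.
forecasts.columns = ['unique_id', 'ds', model_name]
print(forecasts)
```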
3 changes: 2 additions & 1 deletion nbs/core.ipynb
@@ -91,7 +91,7 @@
 "    Informer, Autoformer, FEDformer,\n",
 "    StemGNN, PatchTST, TimesNet, TimeLLM, TSMixer, TSMixerx,\n",
 "    MLPMultivariate, iTransformer,\n",
-"    BiTCN,\n",
+"    BiTCN, TiDE\n",
 ")"
 ]
 },
@@ -238,6 +238,7 @@
 "    'mlpmultivariate': MLPMultivariate, 'automlpmultivariate': MLPMultivariate,\n",
 "    'itransformer': iTransformer, 'autoitransformer': iTransformer,\n",
 "    'bitcn': BiTCN, 'autobitcn': BiTCN,\n",
+"    'tide': TiDE, 'autotide': TiDE,\n",
 "}"
 ]
 },
Binary file added nbs/imgs_models/tide.png