---
title: Informer
keywords: fastai
sidebar: home_sidebar
summary: "API details."
description: "API details."
nb_path: "nbs/models_transformer__informer.ipynb"
---
import pandas as pd
import pytorch_lightning as pl

from neuralforecast.data.datasets.long_horizon import LongHorizon
from neuralforecast.models.transformer.informer import Informer
# Load the ETTm2 long-horizon benchmark: target series (Y), exogenous
# variables (X), and static variables (S).
Y_df, X_df, S_df = LongHorizon.load(directory='./data', group='ETTm2')
Y_df = Y_df.reset_index(drop=True)

# Shift the 'OT' series by +100 so differences between series are easy to observe.
ot_mask = Y_df['unique_id'] == 'OT'
Y_df.loc[ot_mask, 'y'] = Y_df.loc[ot_mask, 'y'] + 100

Y_df.head()
X_df.head()

# Exogenous feature columns: everything except the series id and the timestamp.
f_cols = X_df.drop(columns=['unique_id', 'ds']).columns.to_list()
# Informer hyperparameters (architecture + training).
mc_model = {
    'seq_len': 96,             # encoder input window length
    'label_len': 48,           # decoder warm-start length (presumably — confirm against Informer docs)
    'pred_len': 96,            # forecast horizon
    'output_attention': False,
    'enc_in': 7,               # encoder input channels
    'dec_in': 7,               # decoder input channels
    'd_model': 512,
    'c_out': 7,                # output channels
    'embed': 'timeF',
    'freq': 'h',
    'dropout': 0.05,
    'factor': 1,
    'n_heads': 8,
    'd_ff': 2_048,
    'activation': 'gelu',
    'e_layers': 2,             # encoder layers
    'd_layers': 1,             # decoder layers
    'distil': None,
    'loss_train': 'MAE',
    'loss_hypar': 0.5,
    'loss_valid': 'MAE',
    'learning_rate': 0.001,
    'lr_decay': 0.5,
    'weight_decay': 0.,
    'lr_decay_step_size': 2,
    'random_seed': 1,
}

# Dataset parameters — window sizes mirror the model's input/output lengths.
mc_data = {
    'mode': 'iterate_windows',
    'n_time_in': mc_model['seq_len'],
    'n_time_out': mc_model['pred_len'],
    'batch_size': 1,
    'normalizer_y': None,
    'normalizer_x': None,
    'max_epochs': None,
    'max_steps': 1,
    'early_stop_patience': 20,
}

# Number of timestamps reserved for the validation and test splits.
len_val = 11_520
len_test = 11_520
from neuralforecast.data.tsdataset import IterateWindowsDataset
from torch.utils.data import DataLoader
from neuralforecast.experiments.utils import create_datasets

# Split the series into train/validation/test datasets; the final
# `len_val` + `len_test` timestamps form the validation and test parts.
train_dataset, val_dataset, test_dataset, scaler_y = create_datasets(
    mc=mc_data,
    S_df=None,
    Y_df=Y_df,
    X_df=X_df,
    f_cols=f_cols,
    ds_in_val=len_val,
    ds_in_test=len_test,
)

batch_size = int(mc_data['batch_size'])

# Only the training loader shuffles and drops the last partial batch.
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size,
                          shuffle=True, drop_last=True)
val_loader = DataLoader(dataset=val_dataset, batch_size=batch_size, shuffle=False)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False)
# Instantiate the Informer and fit it with early stopping on validation loss.
model = Informer(**mc_model)

stop_callback = pl.callbacks.EarlyStopping(
    monitor='val_loss',
    min_delta=1e-4,
    patience=mc_data['early_stop_patience'],
    verbose=False,
    mode='min',
)

trainer = pl.Trainer(
    max_epochs=mc_data['max_epochs'],
    max_steps=mc_data['max_steps'],       # max_steps=1 -> a single optimization step (smoke run)
    gradient_clip_val=1.0,
    progress_bar_refresh_rate=10,
    check_val_every_n_epoch=1,
    num_sanity_val_steps=1,
    val_check_interval=1,                 # integer -> validate every training batch
    limit_val_batches=1,
    callbacks=[stop_callback],
)

trainer.fit(model, train_loader, val_loader)
#print("outputs[0][0].shape", outputs[0][0].shape)
#print("outputs[0][1].shape", outputs[0][1].shape)
#print("outputs[0][2].shape", outputs[0][2].shape)
# Build the in-sample history (Y) and the history-plus-future exogenous
# frame (X) used to produce forecasts. X extends one day past Y so future
# exogenous features are available over the forecast horizon.
# NOTE(review): Y_forecast_df['ds'] is left as strings while X_forecast_df['ds']
# is converted to datetime — presumably model.forecast accepts both; confirm.
Y_forecast_df = Y_df.loc[Y_df['ds'] < '2017-10-24'].reset_index(drop=True)
Y_forecast_df.tail()

X_forecast_df = X_df.loc[X_df['ds'] < '2017-10-25'].reset_index(drop=True)
X_forecast_df['ds'] = pd.to_datetime(X_forecast_df['ds'])
X_forecast_df.tail()

forecast_df = model.forecast(Y_df=Y_forecast_df, X_df=X_forecast_df, S_df=S_df)