---
title: MQESRNN model
keywords: fastai
sidebar: home_sidebar
summary: "API details."
description: "API details."
nb_path: "nbs/models_esrnn__mqesrnn.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

class MQESRNN[source]

MQESRNN(n_series:int, input_size:int, output_size:int, n_x:int=0, n_s:int=0, sample_freq:int=1, es_component:str='median_residual', cell_type:str='LSTM', state_hsize:int=50, dilations:List[List[int]]=[[1, 2], [4, 8]], add_nl_layer:bool=False, learning_rate:float=0.001, lr_scheduler_step_size:int=9, lr_decay:float=0.9, gradient_eps:float=1e-08, gradient_clipping_threshold:float=20.0, rnn_weight_decay:float=0.0, noise_std:float=0.001, testing_percentiles:List[float]=[2.5, 5.0, 50.0, 95.0, 97.5], training_percentiles:List[float]=[2.5, 5.0, 50.0, 95.0, 97.5], loss:str='MQ', val_loss:str='MQ', frequency:str='D') :: ESRNN

Multi-Quantile ESRNN. A variant of `ESRNN` trained with the multi-quantile (MQ) loss, so each forecast step returns the percentile levels listed in `training_percentiles` (`testing_percentiles` controls the levels used when evaluating `val_loss`).

{% endraw %} {% raw %}
{% endraw %}
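
The `loss='MQ'` / `val_loss='MQ'` settings together with `training_percentiles` and `testing_percentiles` define a multi-quantile objective: each requested percentile `p` is fit with the quantile (pinball) loss at level `q = p / 100`, averaged over all levels. Below is a minimal sketch of that idea, not the library's exact implementation; the function names, tensor shapes, and lack of masking are assumptions made for illustration.

{% raw %}
import torch as t

def pinball_loss(y, y_hat, q):
    # illustrative quantile (pinball) loss at level q in (0, 1):
    # under-forecasts are penalized with weight q, over-forecasts with weight (1 - q)
    delta = y - y_hat
    return t.maximum(q * delta, (q - 1) * delta).mean()

def mq_loss(y, y_hat, percentiles):
    # illustrative multi-quantile loss
    # y: (..., horizon); y_hat: (..., horizon, n_percentiles)
    # average the pinball loss across all requested percentile levels
    losses = [pinball_loss(y, y_hat[..., i], p / 100) for i, p in enumerate(percentiles)]
    return sum(losses) / len(losses)
{% endraw %}

For example, `mq_loss(t.rand(8, 10), t.rand(8, 10, 4), [30, 50, 70, 90])` would score a forecast tensor whose last dimension holds the four percentile levels used in the test below.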

Tests MQESRNN

{% raw %}
import numpy as np
import pandas as pd
import pytorch_lightning as pl
import matplotlib.pyplot as plt
import torch as t

from neuralforecast.data.tsdataset import TimeSeriesDataset
from neuralforecast.data.tsloader import TimeSeriesLoader
{% endraw %} {% raw %}
# synthetic panel: 1,000 daily series with 100 observations each
n_ds = 100
n_ts = 1_000

output_size = 10

uids = [f'uid_{i + 1}' for i in range(n_ts)]
dss = pd.date_range(end='2020-01-01', periods=n_ds)

Y_df = pd.DataFrame({'unique_id': np.repeat(uids, n_ds), 'ds': np.tile(dss, n_ts)})
{% endraw %} {% raw %}
np.random.seed(10)
# target values: uniform noise in [1, 100), drawn independently for each series
Y_df['y'] = Y_df.groupby('unique_id').transform(lambda x: np.random.uniform(1, 100, size=len(x)))
{% endraw %} {% raw %}
Y_df = Y_df.sort_values(['unique_id', 'ds'])
{% endraw %} {% raw %}
# training dataset: the last 3 * output_size timestamps of every series are
# reserved as a test window (ds_in_test) and excluded from training (is_test=False)
train_dataset = TimeSeriesDataset(Y_df=Y_df,
                                  ds_in_test=3*output_size,
                                  is_test=False,
                                  input_size=7*output_size,
                                  output_size=output_size,
                                  verbose=True)

# validation dataset: built on the full panel, with no hold-out
valid_dataset = TimeSeriesDataset(Y_df=Y_df,
                                  input_size=7*output_size,
                                  output_size=output_size,
                                  verbose=False)
{% endraw %} {% raw %}
# mini-batches of 32 series; eq_batch_size forces equally sized batches
train_loader = TimeSeriesLoader(dataset=train_dataset,
                                batch_size=32,
                                eq_batch_size=True,
                                shuffle=True)

valid_loader = TimeSeriesLoader(dataset=valid_dataset,
                                batch_size=32,
                                shuffle=False)
{% endraw %} {% raw %}
# MQESRNN configured to forecast the 30th, 50th, 70th and 90th percentiles
model = MQESRNN(n_series=train_dataset.n_series,
                n_s=train_dataset.n_s,
                n_x=train_dataset.n_x,
                sample_freq=1,
                input_size=7*2,
                output_size=output_size,
                learning_rate=1e-2,
                lr_scheduler_step_size=30,
                lr_decay=0.1,
                gradient_eps=1e-8,
                gradient_clipping_threshold=10,
                rnn_weight_decay=0,
                noise_std=0.001,
                testing_percentiles=[30, 50, 70, 90],
                training_percentiles=[30, 50, 70, 90],
                es_component='median_residual',
                cell_type='LSTM',
                state_hsize=100,
                dilations=[[1, 2], [4, 8]],
                add_nl_layer=False,
                loss='MQ',
                val_loss='MQ')
{% endraw %} {% raw %}
# short smoke-test run: a single epoch
trainer = pl.Trainer(max_epochs=1, progress_bar_refresh_rate=5, deterministic=True)
trainer.fit(model, train_loader, valid_loader)
{% endraw %} {% raw %}
outputs = trainer.predict(model, valid_loader)
{% endraw %} {% raw %}
# concatenate the per-batch predictions into single arrays
y_true, y_hat, mask = zip(*outputs)
y_true = t.cat(y_true).numpy()
y_hat = t.cat(y_hat).numpy()
mask = t.cat(mask).numpy()
{% endraw %} {% raw %}
# keep only the most recent forecasts for plotting
y_true = y_true[-1000:, -1]
y_hat = y_hat[-1000:, -1]
{% endraw %} {% raw %}
plt.plot(y_true.flatten(), alpha=0.5, label='y')
for idx, p in enumerate([30, 50, 70, 90]):
    y_p = y_hat[:, :, idx]
    plt.plot(y_p.flatten(), alpha=0.5, label=f'p{p}')
    # empirical coverage: share of actuals at or below the p-th percentile
    # forecast; a well-calibrated model should be close to p / 100
    print(f'calibration p{p}: ', (y_true.flatten() <= y_p.flatten()).mean())
plt.legend()
{% endraw %} {% raw %}
# generate forecasts for every series in Y_df
forecast = model.forecast(Y_df, batch_size=128)
{% endraw %} {% raw %}
forecast.query('unique_id == "uid_1"').set_index('ds').plot()
{% endraw %}