---
title: Experiment Utils
keywords: fastai
sidebar: home_sidebar
summary: "Set of functions to easily perform experiments."
description: "Set of functions to easily perform experiments."
nb_path: "nbs/experiments__utils.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

get_mask_dfs[source]

get_mask_dfs(Y_df, ds_in_val, ds_in_test)
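
For reference, a minimal usage sketch (the toy `Y_df` below is hypothetical; it assumes the library's long format with columns `['unique_id', 'ds', 'y']` and that the function returns train, validation and test mask DataFrames):

import pandas as pd

# Hypothetical toy panel: one hourly series in long format.
Y_df = pd.DataFrame({'unique_id': 'NP',
                     'ds': pd.date_range('2020-01-01', periods=200, freq='H'),
                     'y': range(200)})

# Reserve the last 24 timestamps for test and the previous 24 for validation.
train_mask_df, val_mask_df, test_mask_df = get_mask_dfs(Y_df=Y_df,
                                                        ds_in_val=24,
                                                        ds_in_test=24)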

{% endraw %} {% raw %}
{% endraw %} {% raw %}

get_random_mask_dfs[source]

get_random_mask_dfs(Y_df, ds_in_test, n_val_windows, n_ds_val_window, n_uids, freq)

Generates train, test, and random validation masks. The train mask is built by first excluding the last ds_in_test timestamps.

The validation mask: 1) samples n_uids unique ids, 2) creates n_val_windows windows of size n_ds_val_window.

Parameters

- ds_in_test: int. Number of ds in test.
- n_uids: int. Number of unique ids in validation.
- n_val_windows: int. Number of windows for validation.
- n_ds_val_window: int. Number of ds in each validation window.
- periods: int. ds_in_test multiplier.
- freq: str. Datestamp frequency, used to create the random validation windows.
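
A minimal sketch of a call (reusing the toy Y_df from the get_mask_dfs example above; the function is assumed to return train, validation and test mask DataFrames):

# Hold out the last 24 timestamps for test and build a random validation mask
# from 2 windows of 24 timestamps sampled over 1 unique id.
train_mask_df, val_mask_df, test_mask_df = get_random_mask_dfs(Y_df=Y_df,
                                                               ds_in_test=24,
                                                               n_val_windows=2,
                                                               n_ds_val_window=24,
                                                               n_uids=1,
                                                               freq='H')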

{% endraw %} {% raw %}
{% endraw %} {% raw %}

scale_data[source]

scale_data(Y_df, X_df, mask_df, normalizer_y, normalizer_x)
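
A minimal sketch of a call, assuming the function returns the scaled target frame, the scaled exogenous frame and the fitted target scaler:

# Hypothetical call: scale y with a median normalizer fitted on the given mask,
# and leave the exogenous variables in X_df unscaled.
Y_df_scaled, X_df_scaled, scaler_y = scale_data(Y_df=Y_df, X_df=X_df,
                                                mask_df=train_mask_df,
                                                normalizer_y='median',
                                                normalizer_x=None)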

{% endraw %} {% raw %}
{% endraw %} {% raw %}

create_datasets[source]

create_datasets(mc, S_df, Y_df, X_df, f_cols, ds_in_test, ds_in_val, n_uids, n_val_windows, freq, is_val_random)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

instantiate_loaders[source]

instantiate_loaders(mc, train_dataset, val_dataset, test_dataset)
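
A minimal sketch of the typical chain, assuming train_dataset, val_dataset and test_dataset come from create_datasets above and that one dataloader per split is returned:

# Hypothetical call: wrap the three datasets into one loader per split.
train_loader, val_loader, test_loader = instantiate_loaders(mc=mc,
                                                            train_dataset=train_dataset,
                                                            val_dataset=val_dataset,
                                                            test_dataset=test_dataset)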

{% endraw %} {% raw %}
{% endraw %} {% raw %}

instantiate_nbeats[source]

instantiate_nbeats(mc)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

instantiate_esrnn[source]

instantiate_esrnn(mc)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

instantiate_mqesrnn[source]

instantiate_mqesrnn(mc)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

instantiate_deepmidas[source]

instantiate_deepmidas(mc)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

instantiate_model[source]

instantiate_model(mc)
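
instantiate_model dispatches on mc['model'] to the model-specific constructors above. A minimal sketch, assuming an mc dictionary like the ones in the examples below:

# mc['model'] selects the constructor: 'nbeats', 'esrnn', 'mqesrnn' or 'deepmidas'.
mc['model'] = 'nbeats'
model = instantiate_model(mc=mc)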

{% endraw %} {% raw %}
{% endraw %} {% raw %}

model_fit_predict[source]

model_fit_predict(mc, S_df, Y_df, X_df, f_cols, ds_in_test, ds_in_val, n_uids, n_val_windows, freq, is_val_random)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

evaluate_model[source]

evaluate_model(mc, loss_function, S_df, Y_df, X_df, f_cols, ds_in_test, ds_in_val, n_uids, n_val_windows, freq, is_val_random, loss_kwargs)

{% endraw %} {% raw %}
{% endraw %} {% raw %}

hyperopt_tunning[source]

hyperopt_tunning(space, hyperopt_max_evals, loss_function, S_df, Y_df, X_df, f_cols, ds_in_val, n_uids, n_val_windows, freq, is_val_random, save_trials=False, loss_kwargs=None)
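
The returned object is a hyperopt Trials instance. A minimal sketch of recovering the best run after tuning (mirroring the structure of the trials.trials output shown at the end of this page):

# Each trial stores its validation loss and model configuration under 'result'.
best_trial = min(trials.trials, key=lambda trial: trial['result']['loss'])
best_loss = best_trial['result']['loss']
best_mc = best_trial['result']['mc']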

{% endraw %} {% raw %}
{% endraw %}

Experiment Utils Examples

{% raw %}
# numpy and hyperopt's hp are needed below to define the search spaces
import numpy as np
import torch as t
from hyperopt import hp
from nixtlats.losses.numpy import mae, mape, smape, rmse, pinball_loss
{% endraw %} {% raw %}
# use a GPU if available
device = 'cuda' if t.cuda.is_available() else 'cpu'

nbeats_space = {  # Architecture parameters
               'model':'nbeats',
               'mode': 'simple',
               'n_time_in': hp.choice('n_time_in', [7*24]),
               'n_time_out': hp.choice('n_time_out', [24]),
               'n_x_hidden': hp.quniform('n_x_hidden', 1, 10, 1),
               'n_s_hidden': hp.choice('n_s_hidden', [0]),
               'shared_weights': hp.choice('shared_weights', [False]),
               'activation': hp.choice('activation', ['SELU']),
               'initialization':  hp.choice('initialization', ['glorot_normal','he_normal']),
               'stack_types': hp.choice('stack_types', [2*['identity'],
                                                        1*['identity']+1*['exogenous_tcn'],
                                                        1*['exogenous_tcn']+1*['identity'] ]),
               'n_blocks': hp.choice('n_blocks', [ [1, 1] ]),
               'n_layers': hp.choice('n_layers', [ [2, 2] ]),
               'n_hidden': hp.choice('n_hidden', [ 256 ]),
               'n_harmonics': hp.choice('n_harmonics', [1]),
               'n_polynomials': hp.choice('n_polynomials', [2]),
               # Regularization and optimization parameters
               'batch_normalization': hp.choice('batch_normalization', [False]),
               'dropout_prob_theta': hp.uniform('dropout_prob_theta', 0, 0.5),
               'dropout_prob_exogenous': hp.uniform('dropout_prob_exogenous', 0, 0.5),
               'learning_rate': hp.loguniform('learning_rate', np.log(5e-4), np.log(0.001)),
               'lr_decay': hp.uniform('lr_decay', 0.3, 0.5),
               'lr_decay_step_size': hp.choice('lr_decay_step_size', [100]), 
               'weight_decay': hp.loguniform('weight_decay', np.log(5e-5), np.log(5e-3)),
               'max_epochs': hp.choice('max_epochs', [10]), #'n_iterations': hp.choice('n_iterations', [10])
               'max_steps': hp.choice('max_steps', [None]),
               'early_stop_patience': hp.choice('early_stop_patience', [16]),
               'eval_freq': hp.choice('eval_freq', [50]),
               'n_val_weeks': hp.choice('n_val_weeks', [52*2]),
               'loss_train': hp.choice('loss_train', ['MAE']),
               'loss_hypar': hp.choice('loss_hypar', [0.5]),                
               'loss_valid': hp.choice('loss_valid', ['MAE']), #[args.val_loss]),
               'l1_theta': hp.choice('l1_theta', [0]),
               # Data parameters
               'len_sample_chunks': hp.choice('len_sample_chunks', [None]),
               'normalizer_y': hp.choice('normalizer_y', [None]),
               'normalizer_x': hp.choice('normalizer_x', ['median']),
               'window_sampling_limit': hp.choice('window_sampling_limit', [100_000]),
               'complete_inputs': hp.choice('complete_inputs', [False]),
               'complete_sample': hp.choice('complete_sample', [False]),                
               'frequency': hp.choice('frequency', ['H']),
               'seasonality': hp.choice('seasonality', [24]),      
               'idx_to_sample_freq': hp.choice('idx_to_sample_freq', [24]),
               'val_idx_to_sample_freq': hp.choice('val_idx_to_sample_freq', [24]),
               'batch_size': hp.choice('batch_size', [256]),
               'n_series_per_batch': hp.choice('n_series_per_batch', [1]),
               'random_seed': hp.quniform('random_seed', 10, 20, 1),
               'device': hp.choice('device', [device])}

mc = {'model':'nbeats',
      # Architecture parameters
      'n_time_in': 7*24,
      'n_time_out': 24,
      'n_x_hidden': 3,
      'n_s_hidden': 0,
      'shared_weights': False,
      'activation': 'SELU',
      'initialization': 'he_normal',
      'stack_types': ['exogenous_tcn']+1*['identity'],
      'n_blocks': [1, 1],
      'n_layers': [2, 2],
      'n_hidden': 364,
      'n_polynomials': 2,
      'n_harmonics': 1,
      # Regularization and optimization parameters
      'max_epochs': 10, #'n_iterations': 100,
      'max_steps': None,      
      'early_stop_patience': 8,
      'batch_normalization': False,
      'dropout_prob_theta': 0.2,
      'learning_rate': 0.0005, #0.002,
      'lr_decay': 0.64,
      'lr_decay_step_size': 100,
      'weight_decay': 0.00015,
      'eval_freq': 50,
      'n_val_weeks': 52*2,
      'loss_train': 'PINBALL',
      'loss_hypar': 0.5, #0.49,
      'loss_valid': 'MAE',
      'l1_theta': 0,
      # Data parameters
      'normalizer_y': None,
      'normalizer_x': 'median',
      'window_sampling_limit': 100_000,
      'complete_inputs': False,
      'frequency':'H',
      'seasonality': 24,
      'idx_to_sample_freq': 24,
      'val_idx_to_sample_freq': 24,
      'batch_size': 256,
      'n_series_per_batch': 1,
      'random_seed': 10,
      'device': 'cpu'}
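
# Hypothetical check, mirroring the ESRNN example below: build the N-BEATS
# model directly from the manual configuration above.
model = instantiate_nbeats(mc)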
{% endraw %} {% raw %}
esrnn_space = {'model': hp.choice('model', ['esrnn']),
               'mode': 'full',
               # Architecture parameters
               'n_time_in': hp.choice('n_time_in', [7*24]),
               'n_time_out': hp.choice('n_time_out', [24]),
               'dilations': hp.choice('dilations', [ [[1, 2]], [[1,2], [7, 14]] ]),
               'es_component': hp.choice('es_component', ['multiplicative']),
               'cell_type': hp.choice('cell_type', ['LSTM']),
               'state_hsize': hp.quniform('state_hsize', 10, 100, 10),
               'add_nl_layer': hp.choice('add_nl_layer', [True, False]),
               'seasonality': hp.choice('seasonality', [ [24] ]),
               # Regularization and optimization parameters
               'max_epochs':hp.choice('max_epochs', [10]),
               'max_steps':hp.choice('max_steps', [None]),
               'early_stop_patience':hp.choice('early_stop_patience', [10]),
               'eval_freq': hp.choice('eval_freq', [10]),
               'batch_size': hp.choice('batch_size', [32]),
               'learning_rate': hp.loguniform('learning_rate', np.log(5e-4), np.log(0.01)),
               'lr_decay': hp.quniform('lr_decay', 0.5, 0.8, 0.1),
               'lr_decay_step_size': hp.choice('lr_decay_step_size', [100]), 
               'per_series_lr_multip': hp.choice('per_series_lr_multip', [0.5, 1.0, 1.5, 2.0, 3.0]),
               'gradient_eps': hp.choice('gradient_eps', [1e-8]),
               'gradient_clipping_threshold': hp.choice('gradient_clipping_threshold', [10, 50]),
               'rnn_weight_decay': hp.choice('rnn_weight_decay', [0, 0.0005, 0.005]),
               'noise_std': hp.loguniform('noise_std', np.log(0.0001), np.log(0.001)),
               'level_variability_penalty': hp.quniform('level_variability_penalty', 0, 100, 10),
               'testing_percentile': hp.choice('testing_percentile', [50]),
               'training_percentile': hp.choice('training_percentile', [48, 49, 50, 51]),
               'random_seed': hp.quniform('random_seed', 1, 1000, 1),
               'loss_train': hp.choice('loss_train', ['SMYL']),
               'loss_valid': hp.choice('loss_valid', ['MAE']),
               # Data parameters
               'len_sample_chunks': hp.choice('len_sample_chunks', [7*3*24]),
               'window_sampling_limit': hp.choice('window_sampling_limit', [500_000]),
               'complete_inputs': hp.choice('complete_inputs', [True]),
               'complete_sample': hp.choice('complete_sample', [True]),
               'sample_freq': hp.choice('sample_freq', [24]),
               'val_sample_freq': hp.choice('val_sample_freq', [24]),
               'n_series_per_batch': hp.choice('n_series_per_batch', [1]),
               'normalizer_y': hp.choice('normalizer_y', [None]),
               'normalizer_x': hp.choice('normalizer_x',  [None])}

mc = {'model':'esrnn',
      'mode': 'full',
      # Architecture parameters
      'n_series': 1,
      'n_time_in': 7*24,
      'n_time_out': 24,
      'n_x': 1,
      'n_s': 1,
      'dilations': [[1,2], [7]],
      'es_component': 'multiplicative',
      'cell_type': 'LSTM',
      'state_hsize': 50,
      'add_nl_layer': False,
      'seasonality': [24],
      # Regularization and optimization parameters
      'max_epochs': 10, #'n_iterations': 100,
      'max_steps': None,
      'early_stop_patience': 10,
      'eval_freq': 10,
      'batch_size': 32,
      'eq_batch_size': False,
      'learning_rate': 0.0005,
      'lr_decay': 0.8,
      'lr_decay_step_size': 100,
      'per_series_lr_multip': 1.5,
      'gradient_eps': 1e-8, 
      'gradient_clipping_threshold': 20,
      'rnn_weight_decay': 0.0,
      'noise_std': 0.0005,
      'level_variability_penalty': 10,
      'testing_percentile': 50,
      'training_percentile': 50,
      'random_seed': 1,
      'loss_train': 'SMYL',
      'loss_valid': 'MAE',
      # Data parameters
      'len_sample_chunks': 7*4*24,
      'window_sampling_limit': 500_000,
      'complete_inputs': True,
      'sample_freq': 24,
      'val_idx_to_sample_freq': 24,
      'n_series_per_batch': 1,
      'normalizer_y': None,
      'normalizer_x': None}

model = instantiate_esrnn(mc)
{% endraw %} {% raw %}
# use a GPU if available
device = 'cuda' if t.cuda.is_available() else 'cpu'

deepmidas_space = {  # Architecture parameters
               'model':'deepmidas',
               'mode': 'simple',
               'n_time_in': hp.choice('n_time_in', [7*24]),
               'n_time_out': hp.choice('n_time_out', [24]),
               'n_x_hidden': hp.quniform('n_x_hidden', 1, 10, 1),
               'n_s_hidden': hp.choice('n_s_hidden', [0]),
               'shared_weights': hp.choice('shared_weights', [False]),
               'activation': hp.choice('activation', ['SELU']),
               'initialization':  hp.choice('initialization', ['glorot_normal','he_normal']),
               'stack_types': hp.choice('stack_types', [2*['identity']]),
               'n_blocks': hp.choice('n_blocks', [ [1, 1] ]),
               'n_layers': hp.choice('n_layers', [ [2, 2] ]),
               'n_hidden': hp.choice('n_hidden', [ 256 ]),
               'n_pool_kernel_size': hp.choice('n_pool_kernel_size', [ [ 4, 1 ] ]),
               'n_freq_downsample': hp.choice('n_freq_downsample', [ [ 24, 1 ] ]),
               # Regularization and optimization parameters
               'batch_normalization': hp.choice('batch_normalization', [False]),
               'dropout_prob_theta': hp.uniform('dropout_prob_theta', 0, 0.5),
               'dropout_prob_exogenous': hp.uniform('dropout_prob_exogenous', 0, 0.5),
               'learning_rate': hp.loguniform('learning_rate', np.log(5e-4), np.log(0.001)),
               'lr_decay': hp.uniform('lr_decay', 0.3, 0.5),
               'lr_decay_step_size': hp.choice('lr_decay_step_size', [100]), 
               'weight_decay': hp.loguniform('weight_decay', np.log(5e-5), np.log(5e-3)),
               'max_epochs': hp.choice('max_epochs', [10]), #'n_iterations': hp.choice('n_iterations', [10])
               'max_steps': hp.choice('max_steps', [None]),
               'early_stop_patience': hp.choice('early_stop_patience', [16]),
               'eval_freq': hp.choice('eval_freq', [50]),
               'n_val_weeks': hp.choice('n_val_weeks', [52*2]),
               'loss_train': hp.choice('loss_train', ['MAE']),
               'loss_hypar': hp.choice('loss_hypar', [0.5]),                
               'loss_valid': hp.choice('loss_valid', ['MAE']), #[args.val_loss]),
               'l1_theta': hp.choice('l1_theta', [0]),
               # Data parameters
               'len_sample_chunks': hp.choice('len_sample_chunks', [None]),
               'normalizer_y': hp.choice('normalizer_y', [None]),
               'normalizer_x': hp.choice('normalizer_x', ['median']),
               'window_sampling_limit': hp.choice('window_sampling_limit', [100_000]),
               'complete_inputs': hp.choice('complete_inputs', [False]),
               'complete_sample': hp.choice('complete_sample', [False]),                
               'frequency': hp.choice('frequency', ['H']),
               'seasonality': hp.choice('seasonality', [24]),      
               'idx_to_sample_freq': hp.choice('idx_to_sample_freq', [24]),
               'val_idx_to_sample_freq': hp.choice('val_idx_to_sample_freq', [24]),
               'batch_size': hp.choice('batch_size', [256]),
               'n_series_per_batch': hp.choice('n_series_per_batch', [1]),
               'random_seed': hp.quniform('random_seed', 10, 20, 1),
               'device': hp.choice('device', [device])}

mc = {'model':'deepmidas',
      # Architecture parameters
      'n_time_in': 7*24,
      'n_time_out': 24,
      'n_x_hidden': 3,
      'n_s_hidden': 0,
      'shared_weights': False,
      'activation': 'SELU',
      'initialization': 'he_normal',
      'stack_types': ['identity', 'identity'],
      'n_blocks': [1, 1],
      'n_layers': [2, 2],
      'n_pool_kernel_size': [4, 1],
      'n_freq_downsample': [24, 1],
      'n_hidden': 364,
      # Regularization and optimization parameters
      'max_epochs': 10, #'n_iterations': 100,
      'max_steps': None,      
      'early_stop_patience': 8,
      'batch_normalization': False,
      'dropout_prob_theta': 0.2,
      'learning_rate': 0.0005, #0.002,
      'lr_decay': 0.64,
      'lr_decay_step_size': 100,
      'weight_decay': 0.00015,
      'eval_freq': 50,
      'n_val_weeks': 52*2,
      'loss_train': 'PINBALL',
      'loss_hypar': 0.5, #0.49,
      'loss_valid': 'MAE',
      'l1_theta': 0,
      # Data parameters
      'normalizer_y': None,
      'normalizer_x': 'median',
      'window_sampling_limit': 100_000,
      'complete_inputs': False,
      'frequency':'H',
      'seasonality': 24,
      'idx_to_sample_freq': 24,
      'val_idx_to_sample_freq': 24,
      'batch_size': 256,
      'n_series_per_batch': 1,
      'random_seed': 10,
      'device': 'cpu'}
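
# Hypothetical check, mirroring the ESRNN example above: build the DeepMIDAS
# model directly from the manual configuration above.
model = instantiate_deepmidas(mc)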
{% endraw %} {% raw %}
from nixtlats.data.datasets.epf import EPF, EPFInfo
import matplotlib.pyplot as plt

dataset = ['NP']

Y_df, X_df, S_df = EPF.load_groups(directory='data', groups=dataset)

X_df = X_df[['unique_id', 'ds', 'week_day']]
Y_min = Y_df.y.min()
#Y_df.y = Y_df.y - Y_min + 20

plt.plot(Y_df.y.values)
plt.show()
{% endraw %} {% raw %}
# backpropagation through time is slow
# result = evaluate_model(loss_function=mae, mc=mc, 
#                         S_df=S_df, Y_df=Y_df, X_df=X_df, f_cols=[],
#                         ds_in_test=0, ds_in_val=728*24,
#                         n_uids=None, n_val_windows=None, freq=None,
#                         is_val_random=False, loss_kwargs={})
# result
{% endraw %} {% raw %}
# plt.plot(Y_df['y'][-728*24:].values)
{% endraw %} {% raw %}
trials = hyperopt_tunning(space=deepmidas_space, hyperopt_max_evals=2, loss_function=mae,
                          S_df=S_df, Y_df=Y_df, X_df=X_df, f_cols=[],
                          ds_in_val=728*24, n_uids=None, n_val_windows=None, freq=None,
                          is_val_random=False, loss_kwargs={})
  0%|          | 0/2 [00:00<?, ?trial/s, best loss=?]
INFO:hyperopt.tpe:build_posterior_wrapper took 0.074404 seconds
INFO:hyperopt.tpe:TPE using 0 trials
===============================================

activation                                SELU
batch_normalization                      False
batch_size                                 256
complete_inputs                          False
complete_sample                          False
device                                     cpu
dropout_prob_exogenous                0.206928
dropout_prob_theta                    0.095228
early_stop_patience                         16
eval_freq                                   50
frequency                                    H
idx_to_sample_freq                          24
initialization                       he_normal
l1_theta                                     0
learning_rate                          0.00074
len_sample_chunks                         None
loss_hypar                                 0.5
loss_train                                 MAE
loss_valid                                 MAE
lr_decay                              0.488883
lr_decay_step_size                         100
max_epochs                                  10
max_steps                                 None
mode                                    simple
model                                deepmidas
n_blocks                                (1, 1)
n_freq_downsample                      (24, 1)
n_hidden                                   256
n_layers                                (2, 2)
n_pool_kernel_size                      (4, 1)
n_s_hidden                                   0
n_series_per_batch                           1
n_time_in                                  168
n_time_out                                  24
n_val_weeks                                104
n_x_hidden                                 4.0
normalizer_x                            median
normalizer_y                              None
random_seed                               13.0
seasonality                                 24
shared_weights                           False
stack_types               (identity, identity)
val_idx_to_sample_freq                      24
weight_decay                          0.003854
window_sampling_limit                   100000
dtype: object
===============================================

  0%|          | 0/2 [00:00<?, ?trial/s, best loss=?]
INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2016-12-27 2018-12-24 23:00:00
          1           2013-01-01 2016-12-26 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=66.67, 	34944 time stamps 
Outsample percentage=33.33, 	17472 time stamps 

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2013-01-01 2016-12-26 23:00:00
          1           2016-12-27 2018-12-24 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=33.33, 	17472 time stamps 
Outsample percentage=66.67, 	34944 time stamps 

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2013-01-01 2018-12-24 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=0.0, 	0 time stamps 
Outsample percentage=100.0, 	52416 time stamps 

GPU available: False, used: False
TPU available: False, using: 0 TPU cores
IPU available: False, using: 0 IPUs

  | Name  | Type       | Params
-------------------------------------
0 | model | _DeepMIDAS | 376 K 
-------------------------------------
376 K     Trainable params
0         Non-trainable params
376 K     Total params
1.508     Total estimated model params size (MB)

Epoch 0: 100%|##########| 2/2 [00:00<00:00, 27.67it/s, loss=3.44, v_num=25, train_loss_step=3.440, val_loss=6.790]
Metric val_loss improved. New best score: 6.787
Epoch 1: 100%|##########| 2/2 [00:00<00:00, 19.33it/s, loss=4.69, v_num=25, train_loss_step=5.940, val_loss=4.150, train_loss_epoch=3.440]
Metric val_loss improved by 2.640 >= min_delta = 0.0001. New best score: 4.147
Epoch 2: 100%|##########| 2/2 [00:00<00:00, 29.66it/s, loss=4.29, v_num=25, train_loss_step=3.500, val_loss=4.840, train_loss_epoch=5.940]
Epoch 3: 100%|##########| 2/2 [00:00<00:00, 28.05it/s, loss=4.21, v_num=25, train_loss_step=3.970, val_loss=4.500, train_loss_epoch=3.500]
Epoch 4: 100%|##########| 2/2 [00:00<00:00, 21.96it/s, loss=4.11, v_num=25, train_loss_step=3.720, val_loss=3.120, train_loss_epoch=3.970]
Metric val_loss improved by 1.032 >= min_delta = 0.0001. New best score: 3.115
Epoch 5: 100%|##########| 2/2 [00:00<00:00, 18.66it/s, loss=3.87, v_num=25, train_loss_step=2.670, val_loss=3.230, train_loss_epoch=3.720]
Epoch 6: 100%|##########| 2/2 [00:00<00:00, 25.09it/s, loss=3.72, v_num=25, train_loss_step=2.820, val_loss=3.570, train_loss_epoch=2.670]
Epoch 7: 100%|##########| 2/2 [00:00<00:00, 27.61it/s, loss=3.65, v_num=25, train_loss_step=3.110, val_loss=3.200, train_loss_epoch=2.820]
Epoch 8: 100%|##########| 2/2 [00:00<00:00, 27.04it/s, loss=3.55, v_num=25, train_loss_step=2.790, val_loss=2.830, train_loss_epoch=3.110]
Metric val_loss improved by 0.281 >= min_delta = 0.0001. New best score: 2.834
Epoch 9: 100%|##########| 2/2 [00:00<00:00, 17.28it/s, loss=3.44, v_num=25, train_loss_step=2.460, val_loss=3.040, train_loss_epoch=2.790]

Predicting: 100%|##########| 1/1 [00:00<?, ?it/s]
y_true.shape (#n_series, #n_fcds, #lt): (728,)
y_hat.shape (#n_series, #n_fcds, #lt): (728,)
 50%|█████     | 1/2 [00:02<00:02,  2.58s/trial, best loss: 1.9853875637054443]
INFO:hyperopt.tpe:build_posterior_wrapper took 0.013553 seconds
INFO:hyperopt.tpe:TPE using 1/1 trials with best loss 1.985388
===============================================

activation                                SELU
batch_normalization                      False
batch_size                                 256
complete_inputs                          False
complete_sample                          False
device                                     cpu
dropout_prob_exogenous                0.459546
dropout_prob_theta                    0.337756
early_stop_patience                         16
eval_freq                                   50
frequency                                    H
idx_to_sample_freq                          24
initialization                   glorot_normal
l1_theta                                     0
learning_rate                         0.000546
len_sample_chunks                         None
loss_hypar                                 0.5
loss_train                                 MAE
loss_valid                                 MAE
lr_decay                              0.412224
lr_decay_step_size                         100
max_epochs                                  10
max_steps                                 None
mode                                    simple
model                                deepmidas
n_blocks                                (1, 1)
n_freq_downsample                      (24, 1)
n_hidden                                   256
n_layers                                (2, 2)
n_pool_kernel_size                      (4, 1)
n_s_hidden                                   0
n_series_per_batch                           1
n_time_in                                  168
n_time_out                                  24
n_val_weeks                                104
n_x_hidden                                10.0
normalizer_x                            median
normalizer_y                              None
random_seed                               14.0
seasonality                                 24
shared_weights                           False
stack_types               (identity, identity)
val_idx_to_sample_freq                      24
weight_decay                          0.000399
window_sampling_limit                   100000
dtype: object
===============================================

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2016-12-27 2018-12-24 23:00:00
          1           2013-01-01 2016-12-26 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=66.67, 	34944 time stamps 
Outsample percentage=33.33, 	17472 time stamps 

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2013-01-01 2016-12-26 23:00:00
          1           2016-12-27 2018-12-24 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=33.33, 	17472 time stamps 
Outsample percentage=66.67, 	34944 time stamps 

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2013-01-01 2018-12-24 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=0.0, 	0 time stamps 
Outsample percentage=100.0, 	52416 time stamps 

GPU available: False, used: False
TPU available: False, using: 0 TPU cores
IPU available: False, using: 0 IPUs

  | Name  | Type       | Params
-------------------------------------
0 | model | _DeepMIDAS | 376 K 
-------------------------------------
376 K     Trainable params
0         Non-trainable params
376 K     Total params
1.508     Total estimated model params size (MB)

Epoch 0: 100%|##########| 2/2 [00:00<00:00, 24.93it/s, loss=4.14, v_num=26, train_loss_step=4.140, val_loss=5.750]
Metric val_loss improved. New best score: 5.752
Epoch 1: 100%|##########| 2/2 [00:00<00:00, 23.07it/s, loss=4.58, v_num=26, train_loss_step=5.020, val_loss=3.090, train_loss_epoch=4.140]
Metric val_loss improved by 2.664 >= min_delta = 0.0001. New best score: 3.088
Epoch 2: 100%|##########| 2/2 [00:00<00:00, 23.29it/s, loss=4.19, v_num=26, train_loss_step=3.410, val_loss=4.460, train_loss_epoch=5.020]
Epoch 3: 100%|##########| 2/2 [00:00<00:00, 19.77it/s, loss=4.14, v_num=26, train_loss_step=4.000, val_loss=4.120, train_loss_epoch=3.410]
Epoch 4: 100%|##########| 2/2 [00:00<00:00, 17.95it/s, loss=4.07, v_num=26, train_loss_step=3.780, val_loss=2.920, train_loss_epoch=4.000]
Metric val_loss improved by 0.166 >= min_delta = 0.0001. New best score: 2.922
Epoch 5: 100%|##########| 2/2 [00:00<00:00, 20.17it/s, loss=3.89, v_num=26, train_loss_step=3.000, val_loss=3.030, train_loss_epoch=3.780]
Epoch 6: 100%|##########| 2/2 [00:00<00:00, 15.63it/s, loss=3.78, v_num=26, train_loss_step=3.090, val_loss=3.450, train_loss_epoch=3.000]
Epoch 7: 100%|##########| 2/2 [00:00<00:00, 27.92it/s, loss=3.71, v_num=26, train_loss_step=3.230, val_loss=3.200, train_loss_epoch=3.090]
Epoch 8: 100%|##########| 2/2 [00:00<00:00, 26.42it/s, loss=3.64, v_num=26, train_loss_step=3.080, val_loss=2.750, train_loss_epoch=3.230]
Metric val_loss improved by 0.174 >= min_delta = 0.0001. New best score: 2.749
Epoch 9: 100%|##########| 2/2 [00:00<00:00, 22.42it/s, loss=3.55, v_num=26, train_loss_step=2.780, val_loss=2.730, train_loss_epoch=3.080]
Metric val_loss improved by 0.022 >= min_delta = 0.0001. New best score: 2.727

Predicting: 100%|##########| 1/1 [00:00<?, ?it/s]
y_true.shape (#n_series, #n_fcds, #lt): (728,)
y_hat.shape (#n_series, #n_fcds, #lt): (728,)
100%|██████████| 2/2 [00:05<00:00,  2.76s/trial, best loss: 1.9818161725997925]
{% endraw %} {% raw %}
trials.trials
[{'state': 2,
  'tid': 0,
  'spec': None,
  'result': {'loss': 1.9853875637054443,
   'mc': {'activation': 'SELU',
    'batch_normalization': False,
    'batch_size': 256,
    'complete_inputs': False,
    'complete_sample': False,
    'device': 'cpu',
    'dropout_prob_exogenous': 0.2069281539939935,
    'dropout_prob_theta': 0.09522759273514453,
    'early_stop_patience': 16,
    'eval_freq': 50,
    'frequency': 'H',
    'idx_to_sample_freq': 24,
    'initialization': 'he_normal',
    'l1_theta': 0,
    'learning_rate': 0.0007396856533221593,
    'len_sample_chunks': None,
    'loss_hypar': 0.5,
    'loss_train': 'MAE',
    'loss_valid': 'MAE',
    'lr_decay': 0.488883166981553,
    'lr_decay_step_size': 100,
    'max_epochs': 10,
    'max_steps': None,
    'mode': 'simple',
    'model': 'deepmidas',
    'n_blocks': (1, 1),
    'n_freq_downsample': (24, 1),
    'n_hidden': 256,
    'n_layers': (2, 2),
    'n_pool_kernel_size': (4, 1),
    'n_s_hidden': 0,
    'n_series_per_batch': 1,
    'n_time_in': 168,
    'n_time_out': 24,
    'n_val_weeks': 104,
    'n_x_hidden': 4.0,
    'normalizer_x': 'median',
    'normalizer_y': None,
    'random_seed': 13.0,
    'seasonality': 24,
    'shared_weights': False,
    'stack_types': ('identity', 'identity'),
    'val_idx_to_sample_freq': 24,
    'weight_decay': 0.003853558423761368,
    'window_sampling_limit': 100000,
    'n_x': 1,
    'n_s': 1,
    'n_theta_hidden': [[256, 256], [256, 256]]},
   'y_true': array([25.73, 29.37, 28.76, 25.95, 26.71, 29.36, 30.93, 28.3 , 30.58,
          ...,
          52.49, 48.69, 50.12, 48.12, 49.01, 50.47, 52.32, 48.1 ],
         dtype=float32),
   'y_hat': array([26.301683 , 26.385994 , 29.719973 , ..., 47.25535  ,
          48.790386 , 51.34392  ], dtype=float32),
   'run_time': 2.484513998031616,
   'status': 'ok'},
  'misc': {'tid': 0,
   'cmd': ('domain_attachment', 'FMinIter_Domain'),
   'workdir': None,
   'idxs': {'activation': [0],
    'batch_normalization': [0],
    'batch_size': [0],
    'complete_inputs': [0],
    'complete_sample': [0],
    'device': [0],
    'dropout_prob_exogenous': [0],
    'dropout_prob_theta': [0],
    'early_stop_patience': [0],
    'eval_freq': [0],
    'frequency': [0],
    'idx_to_sample_freq': [0],
    'initialization': [0],
    'l1_theta': [0],
    'learning_rate': [0],
    'len_sample_chunks': [0],
    'loss': [0],
    'loss_hypar': [0],
    'loss_valid': [0],
    'lr_decay': [0],
    'lr_decay_step_size': [0],
    'max_epochs': [0],
    'max_steps': [0],
    'n_blocks': [0],
    'n_freq_downsample': [0],
    'n_hidden': [0],
    'n_layers': [0],
    'n_pool_kernel_size': [0],
    'n_s_hidden': [0],
    'n_series_per_batch': [0],
    'n_time_in': [0],
    'n_time_out': [0],
    'n_val_weeks': [0],
    'n_x_hidden': [0],
    'normalizer_x': [0],
    'normalizer_y': [0],
    'random_seed': [0],
    'seasonality': [0],
    'shared_weights': [0],
    'stack_types': [0],
    'val_idx_to_sample_freq': [0],
    'weight_decay': [0],
    'window_sampling_limit': [0]},
   'vals': {'activation': [0],
    'batch_normalization': [0],
    'batch_size': [0],
    'complete_inputs': [0],
    'complete_sample': [0],
    'device': [0],
    'dropout_prob_exogenous': [0.2069281539939935],
    'dropout_prob_theta': [0.09522759273514453],
    'early_stop_patience': [0],
    'eval_freq': [0],
    'frequency': [0],
    'idx_to_sample_freq': [0],
    'initialization': [1],
    'l1_theta': [0],
    'learning_rate': [0.0007396856533221593],
    'len_sample_chunks': [0],
    'loss': [0],
    'loss_hypar': [0],
    'loss_valid': [0],
    'lr_decay': [0.488883166981553],
    'lr_decay_step_size': [0],
    'max_epochs': [0],
    'max_steps': [0],
    'n_blocks': [0],
    'n_freq_downsample': [0],
    'n_hidden': [0],
    'n_layers': [0],
    'n_pool_kernel_size': [0],
    'n_s_hidden': [0],
    'n_series_per_batch': [0],
    'n_time_in': [0],
    'n_time_out': [0],
    'n_val_weeks': [0],
    'n_x_hidden': [4.0],
    'normalizer_x': [0],
    'normalizer_y': [0],
    'random_seed': [13.0],
    'seasonality': [0],
    'shared_weights': [0],
    'stack_types': [0],
    'val_idx_to_sample_freq': [0],
    'weight_decay': [0.003853558423761368],
    'window_sampling_limit': [0]}},
  'exp_key': None,
  'owner': None,
  'version': 0,
  'book_time': datetime.datetime(2021, 11, 9, 21, 50, 14, 608000),
  'refresh_time': datetime.datetime(2021, 11, 9, 21, 50, 17, 106000)},
 {'state': 2,
  'tid': 1,
  'spec': None,
  'result': {'loss': 1.9818161725997925,
   'mc': {'activation': 'SELU',
    'batch_normalization': False,
    'batch_size': 256,
    'complete_inputs': False,
    'complete_sample': False,
    'device': 'cpu',
    'dropout_prob_exogenous': 0.45954562927266907,
    'dropout_prob_theta': 0.3377557670885139,
    'early_stop_patience': 16,
    'eval_freq': 50,
    'frequency': 'H',
    'idx_to_sample_freq': 24,
    'initialization': 'glorot_normal',
    'l1_theta': 0,
    'learning_rate': 0.0005455698037098191,
    'len_sample_chunks': None,
    'loss_hypar': 0.5,
    'loss_train': 'MAE',
    'loss_valid': 'MAE',
    'lr_decay': 0.4122236461769361,
    'lr_decay_step_size': 100,
    'max_epochs': 10,
    'max_steps': None,
    'mode': 'simple',
    'model': 'deepmidas',
    'n_blocks': (1, 1),
    'n_freq_downsample': (24, 1),
    'n_hidden': 256,
    'n_layers': (2, 2),
    'n_pool_kernel_size': (4, 1),
    'n_s_hidden': 0,
    'n_series_per_batch': 1,
    'n_time_in': 168,
    'n_time_out': 24,
    'n_val_weeks': 104,
    'n_x_hidden': 10.0,
    'normalizer_x': 'median',
    'normalizer_y': None,
    'random_seed': 14.0,
    'seasonality': 24,
    'shared_weights': False,
    'stack_types': ('identity', 'identity'),
    'val_idx_to_sample_freq': 24,
    'weight_decay': 0.0003991392255810992,
    'window_sampling_limit': 100000,
    'n_x': 1,
    'n_s': 1,
    'n_theta_hidden': [[256, 256], [256, 256]]},
    'y_true': array([25.73, 29.37, 28.76, ..., 50.47, 52.32, 48.1 ],
           dtype=float32),
    'y_hat': array([25.615463  , 25.772533  , 29.425692  , ...,
           46.92142   , 48.769585  , 51.78891   ], dtype=float32),
   'run_time': 2.906038999557495,
   'status': 'ok'},
  'misc': {'tid': 1,
   'cmd': ('domain_attachment', 'FMinIter_Domain'),
   'workdir': None,
   'idxs': {'activation': [1],
    'batch_normalization': [1],
    'batch_size': [1],
    'complete_inputs': [1],
    'complete_sample': [1],
    'device': [1],
    'dropout_prob_exogenous': [1],
    'dropout_prob_theta': [1],
    'early_stop_patience': [1],
    'eval_freq': [1],
    'frequency': [1],
    'idx_to_sample_freq': [1],
    'initialization': [1],
    'l1_theta': [1],
    'learning_rate': [1],
    'len_sample_chunks': [1],
    'loss': [1],
    'loss_hypar': [1],
    'loss_valid': [1],
    'lr_decay': [1],
    'lr_decay_step_size': [1],
    'max_epochs': [1],
    'max_steps': [1],
    'n_blocks': [1],
    'n_freq_downsample': [1],
    'n_hidden': [1],
    'n_layers': [1],
    'n_pool_kernel_size': [1],
    'n_s_hidden': [1],
    'n_series_per_batch': [1],
    'n_time_in': [1],
    'n_time_out': [1],
    'n_val_weeks': [1],
    'n_x_hidden': [1],
    'normalizer_x': [1],
    'normalizer_y': [1],
    'random_seed': [1],
    'seasonality': [1],
    'shared_weights': [1],
    'stack_types': [1],
    'val_idx_to_sample_freq': [1],
    'weight_decay': [1],
    'window_sampling_limit': [1]},
   'vals': {'activation': [0],
    'batch_normalization': [0],
    'batch_size': [0],
    'complete_inputs': [0],
    'complete_sample': [0],
    'device': [0],
    'dropout_prob_exogenous': [0.45954562927266907],
    'dropout_prob_theta': [0.3377557670885139],
    'early_stop_patience': [0],
    'eval_freq': [0],
    'frequency': [0],
    'idx_to_sample_freq': [0],
    'initialization': [0],
    'l1_theta': [0],
    'learning_rate': [0.0005455698037098191],
    'len_sample_chunks': [0],
    'loss': [0],
    'loss_hypar': [0],
    'loss_valid': [0],
    'lr_decay': [0.4122236461769361],
    'lr_decay_step_size': [0],
    'max_epochs': [0],
    'max_steps': [0],
    'n_blocks': [0],
    'n_freq_downsample': [0],
    'n_hidden': [0],
    'n_layers': [0],
    'n_pool_kernel_size': [0],
    'n_s_hidden': [0],
    'n_series_per_batch': [0],
    'n_time_in': [0],
    'n_time_out': [0],
    'n_val_weeks': [0],
    'n_x_hidden': [10.0],
    'normalizer_x': [0],
    'normalizer_y': [0],
    'random_seed': [14.0],
    'seasonality': [0],
    'shared_weights': [0],
    'stack_types': [0],
    'val_idx_to_sample_freq': [0],
    'weight_decay': [0.0003991392255810992],
    'window_sampling_limit': [0]}},
  'exp_key': None,
  'owner': None,
  'version': 0,
  'book_time': datetime.datetime(2021, 11, 9, 21, 50, 17, 126000),
  'refresh_time': datetime.datetime(2021, 11, 9, 21, 50, 20, 48000)}]
{% endraw %}
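
The `hyperopt_tunning` call returns a hyperopt `Trials` object: each entry stores the sampled configuration (`mc`), its validation `loss`, the `run_time`, and the `y_true`/`y_hat` arrays printed above, so any of the numpy losses can be recomputed after the search without re-fitting a model. Below is a minimal sketch of how to inspect the log and re-score the winning trial, assuming the returned object is bound to `trials` (`trials.trials`, `best_trial`, and `losses()` are standard hyperopt `Trials` API):

{% raw %}
import numpy as np

# Assumption: `trials` is the Trials object returned by hyperopt_tunning.
# `best_trial` selects the entry with the lowest loss among trials whose
# status is 'ok'.
best_result = trials.best_trial['result']

print(f"trials evaluated: {len(trials.trials)}")
print(f"validation losses: {trials.losses()}")
print(f"best model: {best_result['mc']['model']}, "
      f"loss: {best_result['loss']:.4f}, "
      f"run time: {best_result['run_time']:.2f}s")

# Re-score the stored forecasts with the numpy losses imported earlier.
y_true = np.asarray(best_result['y_true'])
y_hat = np.asarray(best_result['y_hat'])
print(f"MAE:   {mae(y_true, y_hat):.3f}")
print(f"RMSE:  {rmse(y_true, y_hat):.3f}")
print(f"sMAPE: {smape(y_true, y_hat):.3f}")
{% endraw %}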