A basic architecture for time series forecasting.
lookback = 7
# Toy target and noisy prediction, shaped (batch, channel, time).
y, y_hat = torch.arange(10).float()[None,None,:], torch.arange(10).float()[None,None,:] + torch.randn(10)
loss_func = CombinedLoss(F.mse_loss, lookback)
# The combined loss is the base loss on the backcast (first `lookback` steps) plus the base loss on the forecast (remaining steps).
test_eq(F.mse_loss(y[:,:,:lookback], y_hat[:,:,:lookback]) + F.mse_loss(y[:,:,lookback:], y_hat[:,:,lookback:]), loss_func(y, y_hat))
# `ratio` weights the two parts; here the forecast term counts `r` times as much as the backcast term.
r = 10
loss_func = CombinedLoss(F.mse_loss, lookback, ratio=[1, r])
loss = loss_func(y, y_hat)
test_eq(F.mse_loss(y[:,:,:lookback], y_hat[:,:,:lookback]) + F.mse_loss(y[:,:,lookback:], y_hat[:,:,lookback:])*r, loss)
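The split-and-weight behaviour pinned down by these tests can be sketched as a small standalone wrapper. This is only an illustration under the assumptions above (channel-last tensors, a two-element `ratio`), not fastseq's actual `CombinedLoss` implementation:

import torch.nn.functional as F

class SplitLossSketch:
    "Hypothetical sketch: apply `loss_fn` to backcast and forecast separately, then sum."
    def __init__(self, loss_fn, lookback, ratio=(1, 1)):
        self.loss_fn, self.lookback, self.ratio = loss_fn, lookback, ratio
    def __call__(self, y, y_hat):
        lb = self.lookback
        return (self.ratio[0] * self.loss_fn(y[..., :lb], y_hat[..., :lb])
                + self.ratio[1] * self.loss_fn(y[..., lb:], y_hat[..., lb:]))

split_loss = SplitLossSketch(F.mse_loss, lookback, ratio=[1, r])
test_eq(split_loss(y, y_hat), loss)  # reproduces the weighted result checked above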
horizon, lookback = 10, 40
# Train a small N-Beats model on a 3-row subset of the M4 daily dataset with the combined loss.
path = untar_data(URLs.m4_daily)
data = TSDataLoaders.from_folder(path, horizon=horizon, lookback=lookback, nrows=3, step=3)
mdl = NBeatsNet(device=data.train.device, horizon=horizon, lookback=lookback, layers=[100])
loss_func = CombinedLoss(F.mse_loss, lookback)
learn = Learner(data, mdl, loss_func=loss_func, opt_func=Adam)
learn.loss_func  # inspect the loss function the learner will use
learn.fit(2, .1)
# Same setup, but track the loss on the backcast and forecast windows separately
# with the `NBeatsBackward` and `NBeatsForward` metrics.
path = untar_data(URLs.m4_daily)
data = TSDataLoaders.from_folder(path, horizon=horizon, lookback=lookback, nrows=1, step=3)
mdl = NBeatsNet(device=data.train.device, horizon=horizon, lookback=lookback, layers=[100])
loss_func = F.mse_loss
learn = Learner(data, mdl, loss_func=loss_func, opt_func=Adam,
                metrics=L(mae, smape, NBeatsBackward(lookback), NBeatsForward(lookback)))
learn.fit(3, .1)
learn.show_results()
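`NBeatsBackward(lookback)` and `NBeatsForward(lookback)` report how well the model reconstructs the lookback window versus how well it forecasts the horizon. As a rough illustration of that split (a sketch, not fastseq's metric classes), any point metric can be restricted to one window like this:

import torch.nn.functional as F

def windowed_metric(metric, lookback, part="forward"):
    # Hypothetical helper: evaluate `metric` on only the backcast ("backward",
    # the first `lookback` steps) or the forecast ("forward", everything after).
    def _inner(y_hat, y):
        sl = slice(None, lookback) if part == "backward" else slice(lookback, None)
        return metric(y_hat[..., sl], y[..., sl])
    return _inner

backward_mse = windowed_metric(F.mse_loss, lookback, part="backward")
forward_mse  = windowed_metric(F.mse_loss, lookback, part="forward")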
# The same split is available for sMAPE via `BackwardSMAPE` and `ForwardSMAPE`.
path = untar_data(URLs.m4_daily)
data = TSDataLoaders.from_folder(path, horizon=horizon, lookback=lookback, nrows=1, step=3)
mdl = NBeatsNet(device=data.train.device, horizon=horizon, lookback=lookback, layers=[100])
loss_func = F.mse_loss
learn = Learner(data, mdl, loss_func=loss_func, opt_func=Adam,
                metrics=L(mae, smape, BackwardSMAPE(lookback), ForwardSMAPE(lookback)))
learn.fit(3, .1)
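For reference, the commonly used symmetric MAPE formula is sketched below; fastseq's `smape`, `BackwardSMAPE`, and `ForwardSMAPE` may differ in scaling or reduction details, so treat this only as intuition for what the metric measures:

import torch

def smape_sketch(y_hat, y, eps=1e-8):
    # sMAPE (common definition): 200 * mean(|y - y_hat| / (|y| + |y_hat|)).
    # `eps` guards against division by zero when both values are 0.
    return 200 * torch.mean((y - y_hat).abs() / (y.abs() + y_hat.abs() + eps))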
# `_get_key_from_nested_dct` collects every entry matching the given key from a nested dict,
# prefixing it with its parent keys ('foo' + 'bar' -> 'foobar') and skipping keys in the ignore list.
dct = {'foo':{'bar':1}, 'bar':2, 'foo2':{'foo3':3}, 'ignore':{'bar':1000}}
r = _get_key_from_nested_dct(dct, 'bar', ['ignore'])
test_eq(r, {'foobar': 1, 'bar': 2})
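A minimal sketch of that behaviour, written to satisfy the test above rather than copied from fastseq's helper:

def get_key_from_nested_dct_sketch(dct, key, ignore=()):
    # Walk the dict; recurse into nested dicts, skipping keys in `ignore`,
    # and keep entries whose key contains `key`, prefixed by their parent keys.
    out = {}
    for k, v in dct.items():
        if k in ignore:
            continue
        if isinstance(v, dict):
            out.update({k + nk: nv for nk, nv in
                        get_key_from_nested_dct_sketch(v, key, ignore).items()})
        elif key in k:
            out[k] = v
    return out

get_key_from_nested_dct_sketch(dct, 'bar', ['ignore'])  # {'foobar': 1, 'bar': 2}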
# Track the `NBeatsTheta` metric (computed from the model's basis-expansion coefficients) while training.
horizon, lookback = 7, 10
path = untar_data(URLs.m4_daily)
data = TSDataLoaders.from_folder(path, horizon=horizon, lookback=lookback, nrows=1, step=3, max_std=5)
mdl = NBeatsNet(device=data.train.device, horizon=horizon, lookback=lookback, layers=[100])
loss_func = F.mse_loss
learn = Learner(data, mdl, loss_func=loss_func, opt_func=Adam, metrics=[NBeatsTheta()])
learn.fit(3, .1)
# After training, the metric's value is a plain tensor.
test_eq(type(learn.metrics[0].value), Tensor)
# Build dataloaders directly from in-memory series and normalize each batch with `NormalizeTS`.
horizon, lookback = 7, 10
items = L(np.arange(-5,100)[None,:], np.arange(500,550)[None,:], np.arange(-110,-56)[None,:]).map(tensor)
data = TSDataLoaders.from_items(items, horizon=horizon, lookback=lookback, step=1, after_batch=NormalizeTS())
mdl = NBeatsNet(device=data.train.device, horizon=horizon, lookback=lookback)
loss_func = F.mse_loss
learn = Learner(data, mdl, loss_func=loss_func, opt_func=Adam,
                metrics=L(mae, smape, NBeatsTheta(), NBeatsBackward(lookback), NBeatsForward(lookback)),
                cbs=L(NBeatsAttention()))
learn.fit(3, .1)
# `means()` summarizes what the `NBeatsAttention` callback collected per block as a DataFrame.
df = learn.n_beats_attention.means()
df
learn.show_results()
horizon, lookback = 2, 10
items = L(np.arange(-5,30)[None,:], np.arange(50)[None,:]).map(tensor)
# Inject an extreme outlier into the second series so some batches produce a huge loss.
items[-1][:,-8:-5] = 1e10
data = TSDataLoaders.from_items(items, horizon=horizon, lookback=lookback, step=1, after_batch=NormalizeTS())
mdl = NBeatsNet(device=data.train.device, horizon=horizon, lookback=lookback)
loss_func = F.mse_loss
# The `ClipLoss` callback keeps these outlier batches from destabilizing training.
learn = Learner(data, mdl, loss_func=loss_func, opt_func=Adam, cbs=L(ClipLoss()))
learn.fit(10, .1)
learn.recorder.plot_loss()
learn.show_results()
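One way such a safeguard can be implemented in a fastai callback is to cancel the batch whenever the training loss exceeds a threshold. This is only an illustrative sketch of the idea, not fastseq's `ClipLoss` implementation, and the threshold value is arbitrary:

from fastai.basics import Callback, CancelBatchException

class SkipHugeLoss(Callback):
    "Hypothetical sketch: skip the optimizer step for batches with an exploding loss."
    def __init__(self, max_loss=1e4): self.max_loss = max_loss
    def after_loss(self):
        # Cancel the rest of the batch (no backward, no step) when the loss blows up.
        if self.training and self.loss.detach() > self.max_loss:
            raise CancelBatchException()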