Rewrote the dataset so that new features can be included
This commit is contained in:
@@ -20,10 +20,6 @@ class QuantileTrainer(AutoRegressiveTrainer):
|
||||
criterion = PinballLoss(quantiles=quantiles_tensor)
|
||||
super().__init__(model=model, optimizer=optimizer, criterion=criterion, data_processor=data_processor, device=device, clearml_helper=clearml_helper, debug=debug)
|
||||
|
||||
def predict_auto_regressive(self, initial_sequence: torch.Tensor, sequence_length: int = 96):
    """Run quantile auto-regressive prediction from a warm-up sequence.

    Moves *initial_sequence* onto the trainer's device and delegates to the
    module-level ``predict_auto_regressive_quantile`` helper, forwarding the
    trainer's model, its quantile levels, and its inverse-CDF sampler.

    Args:
        initial_sequence: Warm-up feature tensor the roll-out starts from.
        sequence_length: Number of auto-regressive steps to predict.

    Returns:
        Whatever ``predict_auto_regressive_quantile`` returns for this model.
    """
    seq_on_device = initial_sequence.to(self.device)
    return predict_auto_regressive_quantile(
        self.model,
        self.sample_from_dist,
        seq_on_device,
        self.quantiles,
        sequence_length,
    )
|
||||
|
||||
def log_final_metrics(self, task, dataloader, train: bool = True):
|
||||
metrics = { metric.__class__.__name__: 0.0 for metric in self.metrics_to_track }
|
||||
@@ -84,6 +80,52 @@ class QuantileTrainer(AutoRegressiveTrainer):
|
||||
fig.update_layout(title="Predictions and Quantiles of the Linear Model", showlegend=show_legend)
|
||||
|
||||
return fig
|
||||
|
||||
def auto_regressive(self, data_loader, idx, sequence_length: int = 96):
    """Roll the model forward auto-regressively from sample *idx* of the dataset.

    One prediction is made from the real features at *idx*; each subsequent
    step re-feeds a value sampled from the predicted quantile distribution,
    combined with fresh exogenous features fetched from the dataset.

    Args:
        data_loader: Loader whose ``.dataset`` supports indexing and
            ``random_day_autoregressive``.
        idx: Starting index into the dataset.
        sequence_length: Total number of predictions to produce (the first
            comes from real data, the remaining ``sequence_length - 1`` are
            auto-regressive).

    Returns:
        Tuple of CPU tensors: ``(initial_sequence, predictions_full, targets)``
        where ``predictions_full`` stacks the raw per-step quantile outputs.
    """
    self.model.eval()
    target_full = []          # ground-truth targets, one per step
    predictions_sampled = []  # scalar samples drawn from each predicted distribution
    predictions_full = []     # raw quantile outputs of the model, one per step

    # Seed the roll-out with a real sample from the dataset.
    prev_features, target = data_loader.dataset[idx]
    prev_features = prev_features.to(self.device)

    # First 96 entries are kept as the warm-up window returned to the caller.
    # NOTE(review): window length 96 is hard-coded here and in the [1:97]
    # slice below — presumably one day of 15-minute steps; confirm.
    initial_sequence = prev_features[:96]

    target_full.append(target)
    with torch.no_grad():
        prediction = self.model(prev_features.unsqueeze(0))
    predictions_full.append(prediction.squeeze(0))

    # sample from the distribution
    sample = self.sample_from_dist(self.quantiles.cpu(), prediction.squeeze(-1).cpu().numpy())
    predictions_sampled.append(sample)

    for i in range(sequence_length - 1):
        # Slide the window: drop the oldest value, append the last sample.
        new_features = torch.cat((prev_features[1:97].cpu(), torch.tensor([predictions_sampled[-1]])), dim=0)
        new_features = new_features.float()

        # get the other needed features
        other_features, new_target = data_loader.dataset.random_day_autoregressive(idx + i + 1)

        if other_features is not None:
            prev_features = torch.cat((new_features, other_features), dim=0)
        else:
            prev_features = new_features

        # add target to target_full
        target_full.append(new_target)

        # predict
        # NOTE(review): the model is fed `new_features` here, NOT
        # `prev_features`, so `other_features` concatenated above never
        # reaches the model at this step (they only affect the [1:97]
        # slice next iteration). Verify this is intentional.
        with torch.no_grad():
            prediction = self.model(new_features.unsqueeze(0).to(self.device))
        predictions_full.append(prediction.squeeze(0))

        # sample from the distribution
        sample = self.sample_from_dist(self.quantiles.cpu(), prediction.squeeze(-1).cpu().numpy())
        predictions_sampled.append(sample)

    return initial_sequence.cpu(), torch.stack(predictions_full).cpu(), torch.stack(target_full).cpu()
|
||||
|
||||
@staticmethod
|
||||
def sample_from_dist(quantiles, output_values):
|
||||
|
||||
Reference in New Issue
Block a user