From 0abaf3b726c352684c90681138f88b9a54c2673c Mon Sep 17 00:00:00 2001
From: rgayatri
Date: Mon, 17 Oct 2022 14:41:09 +0200
Subject: [PATCH] add softmax to the final output

---
 deeprank/learn/NeuralNet.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/deeprank/learn/NeuralNet.py b/deeprank/learn/NeuralNet.py
index 818dc07..bbf9200 100644
--- a/deeprank/learn/NeuralNet.py
+++ b/deeprank/learn/NeuralNet.py
@@ -666,6 +666,7 @@ def _epoch(self, epoch_number, pass_name, data_loader, train_model):
 
         entry_names = []
         output_values = []
+        output_probs = []
         target_values = []
 
         for batch_index, batch in enumerate(data_loader):
@@ -707,8 +708,10 @@ def _epoch(self, epoch_number, pass_name, data_loader, train_model):
 
             output_values += outputs.tolist()
             target_values += targets.tolist()
+
+        output_probs = F.softmax(torch.FloatTensor(output_values), dim=0).tolist()
 
-        self._metrics_output.process(pass_name, epoch_number, entry_names, output_values, target_values)
+        self._metrics_output.process(pass_name, epoch_number, entry_names, output_probs, target_values)
 
         if count_data_entries > 0:
             epoch_loss = sum_of_losses / count_data_entries