From f66e511624d0fe9a48a4ba72348a3ed8e3b60f02 Mon Sep 17 00:00:00 2001
From: Simon Pfreundschuh
Date: Mon, 17 Oct 2016 02:52:51 +0200
Subject: [PATCH] Disabled log output in interactive mode.

---
 tmva/tmva/src/MethodDNN.cxx | 80 +++++++++++++++++++++----------------
 1 file changed, 46 insertions(+), 34 deletions(-)

diff --git a/tmva/tmva/src/MethodDNN.cxx b/tmva/tmva/src/MethodDNN.cxx
index ec414a92a3c61..e23a3a269f6d1 100644
--- a/tmva/tmva/src/MethodDNN.cxx
+++ b/tmva/tmva/src/MethodDNN.cxx
@@ -803,13 +803,15 @@ void TMVA::MethodDNN::TrainGpu()
    std::chrono::time_point<std::chrono::system_clock> start, end;
    start = std::chrono::system_clock::now();
 
-   Log() << std::setw(10) << "Epoch" << " | "
-         << std::setw(12) << "Train Err."
-         << std::setw(12) << "Test Err."
-         << std::setw(12) << "GFLOP/s"
-         << std::setw(12) << "Conv. Steps" << Endl;
-   std::string separator(62, '-');
-   Log() << separator << Endl;
+   if (!fInteractive) {
+      Log() << std::setw(10) << "Epoch" << " | "
+            << std::setw(12) << "Train Err."
+            << std::setw(12) << "Test Err."
+            << std::setw(12) << "GFLOP/s"
+            << std::setw(12) << "Conv. Steps" << Endl;
+      std::string separator(62, '-');
+      Log() << separator << Endl;
+   }
 
    while (!converged)
    {
@@ -852,12 +854,6 @@ void TMVA::MethodDNN::TrainGpu()
       }
       trainingError /= (Double_t) (nTrainingSamples / settings.batchSize);
 
-      if (fInteractive){
-         fInteractive->AddPoint(stepCount, trainingError, testError);
-         fIPyCurrentIter = 100*(double)minimizer.GetConvergenceCount() /(double)settings.convergenceSteps;
-         if (fExitFromTraining) break;
-      }
-
       // Compute numerical throughput.
       std::chrono::duration<double> elapsed_seconds = end - start;
       double seconds = elapsed_seconds.count();
@@ -867,13 +863,20 @@ void TMVA::MethodDNN::TrainGpu()
       converged = minimizer.HasConverged(testError);
       start = std::chrono::system_clock::now();
 
-      Log() << std::setw(10) << stepCount << " | "
-            << std::setw(12) << trainingError
-            << std::setw(12) << testError
-            << std::setw(12) << nFlops / seconds
-            << std::setw(12) << minimizer.GetConvergenceCount() << Endl;
-      if (converged) {
-         Log() << Endl;
+      if (fInteractive) {
+         fInteractive->AddPoint(stepCount, trainingError, testError);
+         fIPyCurrentIter = 100.0 * minimizer.GetConvergenceCount()
+                           / minimizer.GetConvergenceSteps ();
+         if (fExitFromTraining) break;
+      } else {
+         Log() << std::setw(10) << stepCount << " | "
+               << std::setw(12) << trainingError
+               << std::setw(12) << testError
+               << std::setw(12) << nFlops / seconds
+               << std::setw(12) << minimizer.GetConvergenceCount() << Endl;
+         if (converged) {
+            Log() << Endl;
+         }
       }
    }
 }
@@ -970,13 +973,15 @@ void TMVA::MethodDNN::TrainCpu()
    std::chrono::time_point<std::chrono::system_clock> start, end;
    start = std::chrono::system_clock::now();
 
-   Log() << std::setw(10) << "Epoch" << " | "
-         << std::setw(12) << "Train Err."
-         << std::setw(12) << "Test Err."
-         << std::setw(12) << "GFLOP/s"
-         << std::setw(12) << "Conv. Steps" << Endl;
-   std::string separator(62, '-');
-   Log() << separator << Endl;
+   if (!fInteractive) {
+      Log() << std::setw(10) << "Epoch" << " | "
+            << std::setw(12) << "Train Err."
+            << std::setw(12) << "Test Err."
+            << std::setw(12) << "GFLOP/s"
+            << std::setw(12) << "Conv. Steps" << Endl;
+      std::string separator(62, '-');
+      Log() << separator << Endl;
+   }
 
    while (!converged)
    {
@@ -1033,13 +1038,20 @@ void TMVA::MethodDNN::TrainCpu()
       converged = minimizer.HasConverged(testError);
      start = std::chrono::system_clock::now();
 
-      Log() << std::setw(10) << stepCount << " | "
-            << std::setw(12) << trainingError
-            << std::setw(12) << testError
-            << std::setw(12) << nFlops / seconds
-            << std::setw(12) << minimizer.GetConvergenceCount() << Endl;
-      if (converged) {
-         Log() << Endl;
+      if (fInteractive) {
+         fInteractive->AddPoint(stepCount, trainingError, testError);
+         fIPyCurrentIter = 100.0 * minimizer.GetConvergenceCount()
+                           / minimizer.GetConvergenceSteps ();
+         if (fExitFromTraining) break;
+      } else {
+         Log() << std::setw(10) << stepCount << " | "
+               << std::setw(12) << trainingError
+               << std::setw(12) << testError
+               << std::setw(12) << nFlops / seconds
+               << std::setw(12) << minimizer.GetConvergenceCount() << Endl;
+         if (converged) {
+            Log() << Endl;
+         }
       }
    }
 }