diff --git a/Utils/trunk/EQM/PWR_MOX_MLP/Train_MLP.cxx b/Utils/trunk/EQM/PWR_MOX_MLP/Train_MLP.cxx
index e56077606fd2072922174b43a275b430d07e419b..e701f0dc07eeff2deaa2c7f8ff85319c4d5ebdc6 100755
--- a/Utils/trunk/EQM/PWR_MOX_MLP/Train_MLP.cxx
+++ b/Utils/trunk/EQM/PWR_MOX_MLP/Train_MLP.cxx
@@ -1,6 +1,7 @@
 //
 // This program train and test a MLP using TMVA from a training data in a form of a TTRee
 //
+//@author Root_tmva_team modified by BaL
 #include <cstdlib>
 #include <iostream>
 #include <map>
diff --git a/Utils/trunk/MURE2CLASS/MURE2CLASS.cxx b/Utils/trunk/MURE2CLASS/MURE2CLASS.cxx
index f3a88d4e621fd4a25c181269bea8ff6a3ef5d25f..559037d60c7f1a015ba1cf9835c796e69bd35413 100755
--- a/Utils/trunk/MURE2CLASS/MURE2CLASS.cxx
+++ b/Utils/trunk/MURE2CLASS/MURE2CLASS.cxx
@@ -1,3 +1,7 @@
+// DESCRIPTION
+// This program converts MURE output to the EvolutionData format
+//
+//@author BaM
 #include "BinaryFormat2.hxx"
 #include "StringLine.hxx"
 #include "ZAI.hxx"
diff --git a/Utils/trunk/XSM/MLP/BuildInput/Gene.cxx b/Utils/trunk/XSM/MLP/BuildInput/Gene.cxx
index f33d3a8858aba503160f47be743888a9f8f5e425..1bf019439efa62b5cba26bd45a44131ff5124dc3 100755
--- a/Utils/trunk/XSM/MLP/BuildInput/Gene.cxx
+++ b/Utils/trunk/XSM/MLP/BuildInput/Gene.cxx
@@ -1,5 +1,5 @@
 /**********************************************************/
-// Make the input file for the MLPs training 
+// Make the input file for the MLPs training
 //
 // This programs reads a set of .dat files which are the
 // results of a depletion calculation (see manual and
@@ -8,6 +8,8 @@
 // TrainingInput.root .
 // The file TrainingInput.cxx is the list of MLP outputs
 // (cross sections)
+//
+//@author BaM, BaL
 /**********************************************************/
 #include "Gene.hxx"
 #include <TH1F.h>
diff --git a/Utils/trunk/XSM/MLP/Train/LaunchTraining.sh b/Utils/trunk/XSM/MLP/Train/LaunchTraining.sh
index 2762e35c462695613257dee6b6e2c5f5b030f007..b41b382885247d0a88b1aafcacb7bffa2835551b 100755
--- a/Utils/trunk/XSM/MLP/Train/LaunchTraining.sh
+++ b/Utils/trunk/XSM/MLP/Train/LaunchTraining.sh
@@ -1,5 +1,8 @@
 #!/bin/bash
 
+# Script to train MLPs
+#@author BaL
+#
 echo "--------------------------"
 echo "--- Run Training from MLP $1 to $2 ---"
@@ -8,9 +11,7 @@ echo "--------------------------"
 
 #LigneDeDepart=$((0))
 #NbreSimu=$((700))
 LigneDeDepart=$1
-NbreSimu=$2
-
-LigneFinal=$(( ${LigneDeDepart} + ${NbreSimu} ))
+LigneFinal=$2
 
 echo LigneDeDepart $LigneDeDepart LigneFinal $LigneFinal exclu
diff --git a/Utils/trunk/XSM/MLP/Train/Train_XS.cxx b/Utils/trunk/XSM/MLP/Train/Train_XS.cxx
index fb873034ee3e40effe4e2a45dbf7229affb276e6..2d79051d7ddae5e4d84c10e21400442774d5c343 100755
--- a/Utils/trunk/XSM/MLP/Train/Train_XS.cxx
+++ b/Utils/trunk/XSM/MLP/Train/Train_XS.cxx
@@ -2,6 +2,7 @@
 // Train one MLP "INDICE" from the
 //file ../BuildInput/TrainingInput.root
 //
+//@author Root_tmva_Team modified by BaL
 /***********************************/
 
 #include <cstdlib>