Commit 7586699b authored by Doligez

Fix XS Utils

parent a45f1fce
@@ -455,8 +455,8 @@ void Generate_tmva_factory_input(double ProportionOfTraining)
   ofstream InputNetwork("_tmp/include_Train_XS/InputVariables.cxx");
   for(map<ZAI,string>::iterator it = fMapName.begin() ; it != fMapName.end() ; it++ )
-    InputNetwork <<"factory->AddVariable( \"" << it->second << "\" , \"" << it->second << "\", \"IsotopicFraction\", 'F' );"<<endl;
-  InputNetwork <<"factory->AddVariable( \"Time\" , \"Time\" , \"seconds\", 'F' );"<<endl<<endl;
+    InputNetwork <<"dataloader->AddVariable( \"" << it->second << "\" , \"" << it->second << "\", \"IsotopicFraction\", 'F' );"<<endl;
+  InputNetwork <<"dataloader->AddVariable( \"Time\" , \"Time\" , \"seconds\", 'F' );"<<endl<<endl;
   ProportionOfTraining /=100;
   InputNetwork <<"double PropTraining = "<< ProportionOfTraining << ";" << endl;
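For illustration only, the include generated by this loop would now read roughly as follows; the isotope names U235 and Pu239 and the 0.6 training fraction are placeholders, not values taken from the commit. The snippet is later pulled into Train_XS_Time via #include, so the dataloader object it refers to must already exist at that point:

  dataloader->AddVariable( "U235" , "U235", "IsotopicFraction", 'F' );
  dataloader->AddVariable( "Pu239" , "Pu239", "IsotopicFraction", 'F' );
  dataloader->AddVariable( "Time" , "Time" , "seconds", 'F' );

  double PropTraining = 0.6;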
@@ -744,7 +744,7 @@ void DumpInputNeuron(string filename)
   TTree* fOutT = new TTree("Data", "Data");
-  /**********************INITIALISATIONNN********************/
+  /**********************INITIALISATION**********************/
   ////////////////////////////////////////////////////////
   // INIT FRESH FUEL COMPOSITION and TIME
@@ -965,7 +965,7 @@ void ReadAndFill(string jobname)
   getline(DecayDB, line);
-  /******Getting Time vecotr ....******/
+  /******Getting Time vector ....******/
   if( StringLine::NextWord(line, start, ' ') != "time")
   {
     cout << "\033[31m!!Bad Trouble!! !!!EvolutiveProduct!!! Bad Database file : " << jobname << "\033[0m" << endl;
......
@@ -20,10 +20,13 @@
 #include "TObjString.h"
 #include "TSystem.h"
 #include "TROOT.h"
+#include <stdio.h>
+#include <stdlib.h>
 #if not defined(__CINT__) || defined(__MAKECINT__)
 #include "TMVA/Tools.h"
 #include "TMVA/Factory.h"
+#include "TMVA/DataLoader.h"
 #endif
 using namespace TMVA;
@@ -68,12 +71,13 @@ void Train_XS_Time(int INDICE)
   TMVA::Factory *factory = new TMVA::Factory( "TMVARegression", OUTPUTFile,
-                                              "!V:!Silent:Color:DrawProgressBar" );
+                                              "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );
+  TMVA::DataLoader *dataloader = new TMVA::DataLoader("dataset");
 #include "_tmp/include_Train_XS/InputVariables.cxx"
   // Add the variable carrying the regression target
-  factory->AddTarget( OUTPUT[INDICE].c_str() ); //The name of the MLP output
+  dataloader->AddTarget( OUTPUT[INDICE].c_str() ); //The name of the MLP output
   // It is also possible to declare additional targets for multi-dimensional regression, ie:
   // -- factory->AddTarget( "fvalue2" );
@@ -101,7 +105,7 @@ void Train_XS_Time(int INDICE)
   Double_t regWeight = 1.0;
   // You can add an arbitrary number of regression trees
-  factory->AddRegressionTree( regTree, regWeight );
+  dataloader->AddRegressionTree( regTree, regWeight );
   // Apply additional cuts on the signal and background samples (can be different)
   TCut mycut = ""; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
@@ -114,7 +118,7 @@ void Train_XS_Time(int INDICE)
   Samples_Parameters << "nTrain_Regression=" << NTraining <<":"<< "nTest_Regression=" << NTesting <<":SplitMode=Random:NormMode=NumEvents:!V";
   // tell the factory to use all remaining events in the trees after training for testing:
-  factory->PrepareTrainingAndTestTree( mycut, Samples_Parameter.str() );
+  dataloader->PrepareTrainingAndTestTree( mycut, Samples_Parameters.str() );
   // If no numbers of events are given, half of the events in the tree are used
   // for training, and the other half for testing:
@@ -130,7 +134,7 @@ void Train_XS_Time(int INDICE)
   std::stringstream Name;
   Name << OUTPUT[INDICE];
   // Neural network (MLP)
-  factory->BookMethod( TMVA::Types::kMLP, Name.str().c_str(), "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N,N:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
+  factory->BookMethod( dataloader, TMVA::Types::kMLP, Name.str().c_str(), "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N,N:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
   // --------------------------------------------------------------------------------------------------
......
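Taken together, the changes migrate Train_XS_Time from the pre-ROOT-6.08 TMVA interface, where the Factory owned the training data, to the Factory + DataLoader interface: variables, target, regression tree and the train/test split now go to the DataLoader, while methods are booked on the Factory with the DataLoader as first argument. A condensed, self-contained sketch of the resulting setup follows; the file name XS.root, the tree name Data, the U235 input and the SigmaFission target are placeholders for illustration, since the real macro takes them from the generated include and from OUTPUT[INDICE]:

  #include "TFile.h"
  #include "TTree.h"
  #include "TCut.h"
  #include "TMVA/Tools.h"
  #include "TMVA/Types.h"
  #include "TMVA/Factory.h"
  #include "TMVA/DataLoader.h"

  void Train_XS_Time_sketch()
  {
     TMVA::Tools::Instance();

     // Placeholder input/output files and tree; the real macro builds these itself.
     TFile* OUTPUTFile = TFile::Open("TMVAReg.root", "RECREATE");
     TFile* InputFile  = TFile::Open("XS.root");
     TTree* regTree    = (TTree*)InputFile->Get("Data");

     // Factory now carries an explicit AnalysisType option...
     TMVA::Factory* factory = new TMVA::Factory("TMVARegression", OUTPUTFile,
           "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression");
     // ...and the data description lives in a DataLoader.
     TMVA::DataLoader* dataloader = new TMVA::DataLoader("dataset");

     // In the real macro these AddVariable calls come from the generated
     // _tmp/include_Train_XS/InputVariables.cxx.
     dataloader->AddVariable("U235", "U235", "IsotopicFraction", 'F');
     dataloader->AddVariable("Time", "Time", "seconds", 'F');
     dataloader->AddTarget("SigmaFission");

     dataloader->AddRegressionTree(regTree, 1.0);
     TCut mycut = "";
     dataloader->PrepareTrainingAndTestTree(mycut,
           "nTrain_Regression=1000:nTest_Regression=1000:SplitMode=Random:NormMode=NumEvents:!V");

     // BookMethod takes the DataLoader as its first argument in the new interface.
     factory->BookMethod(dataloader, TMVA::Types::kMLP, "MLP_XS",
           "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N,N:TrainingMethod=BFGS:!UseRegulator");

     factory->TrainAllMethods();
     factory->TestAllMethods();
     factory->EvaluateAllMethods();

     OUTPUTFile->Close();
  }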