diff --git a/Utils/XSM/MLP/Generate/Generate_XSM.cxx b/Utils/XSM/MLP/Generate/Generate_XSM.cxx
index 5a823307955bc36407cced5520a2cef163f32f60..213fe2f438b0d7d4249676b94b66689e3cf2f158 100644
--- a/Utils/XSM/MLP/Generate/Generate_XSM.cxx
+++ b/Utils/XSM/MLP/Generate/Generate_XSM.cxx
@@ -455,8 +455,8 @@ void Generate_tmva_factory_input(double ProportionOfTraining)
 
 	ofstream  InputNetwork("_tmp/include_Train_XS/InputVariables.cxx");
 	for(map<ZAI,string>::iterator it = fMapName.begin() ; it != fMapName.end() ; it++ )
-		InputNetwork <<"factory->AddVariable( \"" << it->second  << "\" , \"" << it->second << "\", \"IsotopicFraction\", 'F' );"<<endl; 
-    InputNetwork <<"factory->AddVariable( \"Time\" , \"Time\"     , \"seconds\", 'F' );"<<endl<<endl;
+		InputNetwork <<"dataloader->AddVariable( \"" << it->second  << "\" , \"" << it->second << "\", \"IsotopicFraction\", 'F' );"<<endl; 
+    InputNetwork <<"dataloader->AddVariable( \"Time\" , \"Time\"     , \"seconds\", 'F' );"<<endl<<endl;
 
 	ProportionOfTraining /=100;
     InputNetwork <<"double PropTraining = "<< ProportionOfTraining << ";" << endl;
@@ -744,7 +744,7 @@ void DumpInputNeuron(string filename)
 	TTree*   fOutT = new TTree("Data", "Data");
 
 
-/**********************INITIALISATIONNN********************/
+/**********************INITIALISATION**********************/
 
 	////////////////////////////////////////////////////////
 	// INIT FRESH FUEL COMPOSITION and TIME
@@ -965,7 +965,7 @@ void ReadAndFill(string jobname)
 	
 	getline(DecayDB, line);
 	
-	/******Getting Time vecotr ....******/
+	/******Getting Time vector ....******/
 	if( StringLine::NextWord(line, start, ' ') != "time")
 	{
 		cout << "\033[31m!!Bad Trouble!! !!!EvolutiveProduct!!! Bad Database file : " <<  jobname << "\033[0m" << endl;
diff --git a/Utils/XSM/MLP/Generate/Train_XS.cxx b/Utils/XSM/MLP/Generate/Train_XS.cxx
index f54c9d10718d347a5f19b20a57dc11b3e4151071..60003976fbb0d4e3e7f2785a424638d4319b7001 100644
--- a/Utils/XSM/MLP/Generate/Train_XS.cxx
+++ b/Utils/XSM/MLP/Generate/Train_XS.cxx
@@ -20,10 +20,13 @@
 #include "TObjString.h"
 #include "TSystem.h"
 #include "TROOT.h"
+#include <cstdio>
+#include <cstdlib>
 
 #if not defined(__CINT__) || defined(__MAKECINT__)
 #include "TMVA/Tools.h"
 #include "TMVA/Factory.h"
+#include "TMVA/DataLoader.h"
 #endif
 
 using namespace TMVA;
@@ -68,12 +71,14 @@ void Train_XS_Time(int INDICE)
 
 
    TMVA::Factory *factory = new TMVA::Factory( "TMVARegression",    OUTPUTFile, 
-                                               "!V:!Silent:Color:DrawProgressBar" );
-
+                                               "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );
+
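+   // In the newer TMVA interface the DataLoader, not the Factory, owns the
+   // input variables, the regression target and the training tree.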
+   TMVA::DataLoader *dataloader = new TMVA::DataLoader("dataset");
 
    #include "_tmp/include_Train_XS/InputVariables.cxx"
    // Add the variable carrying the regression target
-   factory->AddTarget(   OUTPUT[INDICE].c_str() ); //The name of the MLP output
+   dataloader->AddTarget(   OUTPUT[INDICE].c_str() ); //The name of the MLP output
 
    // It is also possible to declare additional targets for multi-dimensional regression, ie:
    // -- factory->AddTarget( "fvalue2" );
@@ -101,7 +105,7 @@ void Train_XS_Time(int INDICE)
    Double_t regWeight  = 1.0;   
 
    // You can add an arbitrary number of regression trees
-   factory->AddRegressionTree( regTree, regWeight );
+   dataloader->AddRegressionTree( regTree, regWeight );
 
    // Apply additional cuts on the signal and background samples (can be different)
    TCut mycut = ""; // for example: TCut mycut = "abs(var1)<0.5 && abs(var2-0.5)<1";
@@ -114,7 +118,7 @@ void Train_XS_Time(int INDICE)
    Samples_Parameters <<  "nTrain_Regression=" << NTraining <<":"<< "nTest_Regression=" << NTesting <<":SplitMode=Random:NormMode=NumEvents:!V";
 
    // tell the factory to use all remaining events in the trees after training for testing:
-   factory->PrepareTrainingAndTestTree( mycut, Samples_Parameter.str() );
+   dataloader->PrepareTrainingAndTestTree( mycut, Samples_Parameters.str() );
 
    // If no numbers of events are given, half of the events in the tree are used 
    // for training, and the other half for testing:
@@ -130,7 +134,7 @@ void Train_XS_Time(int INDICE)
    std::stringstream Name;
    Name <<  OUTPUT[INDICE];
    // Neural network (MLP)                                                                                    
-      factory->BookMethod( TMVA::Types::kMLP, Name.str().c_str(), "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N,N:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
+      factory->BookMethod( dataloader, TMVA::Types::kMLP, Name.str().c_str(), "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N,N:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
 
    // --------------------------------------------------------------------------------------------------