#include <cstdlib>
#include <iostream>
#include <map>
#include <string>

#include "TChain.h"
#include "TFile.h"
#include "TTree.h"
#include "TCut.h" // used below; also pulled in via TMVA/DataLoader.h
#include "TString.h"
#include "TObjString.h"
#include "TSystem.h"
#include "TROOT.h"

#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Tools.h"
#include "TMVA/TMVAGui.h"

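// Usage: a typical invocation from the ROOT prompt (assuming this macro file is
// named TMVA_D0_D0bar.C) is
//
//    root -l -b -q 'TMVA_D0_D0bar.C("BDT,MLP")'
//
// where the optional argument is a comma-separated list of methods to enable;
// with no argument, the methods switched on below (MLP and BDT) are used.
// The macro can also be compiled and run via the main() at the bottom of this file.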
int TMVA_D0_D0bar( TString myMethodList = "" )
{
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // Cut optimisation
   Use["Cuts"]            = 0;
   Use["CutsD"]           = 0;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   //
   // 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 0;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 0; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // Multidimensional likelihood and nearest-neighbour methods
   Use["PDERS"]           = 0;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 0;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 0; // k-nearest neighbour method
   //
   // Linear Discriminant Analysis
   Use["LD"]              = 0; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // Function Discriminant Analysis
   Use["FDA_GA"]          = 0; // minimisation of user-defined function using Genetic Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 1; // recommended ANN
   Use["MLPBFGS"]         = 0; // recommended ANN with optional training method
   Use["MLPBNN"]          = 0; // recommended ANN with BFGS training method and Bayesian regulator
   Use["CFMlpANN"]        = 0; // deprecated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
#ifdef R__HAS_TMVAGPU
   Use["DNN_GPU"]         = 0; // CUDA-accelerated DNN training
#else
   Use["DNN_GPU"]         = 0;
#endif

#ifdef R__HAS_TMVACPU
   Use["DNN_CPU"]         = 0; // multi-core accelerated DNN
#else
   Use["DNN_CPU"]         = 0;
#endif
   //
   // Support Vector Machine
   Use["SVM"]             = 0;
   //
   // Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   Use["BDTF"]            = 0; // allows usage of the Fisher discriminant for node splitting
   //
   // Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 0;
   // ---------------------------------------------------------------

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassification" << std::endl;

   // Select methods
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return 1;
         }
         Use[regMethod] = 1;
      }
   }

   // Read training and test data
   // (it is also possible to use ASCII format as input -> see TMVA Users Guide)
   TFile *input(nullptr);
   TString fname = "/sphenix/user/rosstom/analysis/HF-Particle/KFParticle_sPHENIX/Run40Acceptance082922/Run40_D0_Separated_091922.root";
   if (!gSystem->AccessPathName( fname )) {
      // the file in the local directory exists
      input = TFile::Open( fname );
   }
   else {
      // otherwise fall back to the TMVA example dataset
      TFile::SetCacheFileDir(".");
      input = TFile::Open("http://root.cern.ch/files/tmva_class_example.root", "CACHEREAD");
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassification       : Using input file: " << input->GetName() << std::endl;

   // Register the training and test trees

   TTree *D0_Tree         = (TTree*)input->Get("D0_tree");
   TTree *D0bar_Tree      = (TTree*)input->Get("D0bar_tree");
   TTree *Background_Tree = (TTree*)input->Get("Background_tree");
   if (!D0_Tree || !D0bar_Tree || !Background_Tree) {
      std::cout << "ERROR: could not find D0_tree, D0bar_tree, or Background_tree in the input file" << std::endl;
      exit(1);
   }

   // Create a ROOT output file where TMVA will store ntuples, histograms, etc.
   TString outfileName( "TMVA_D0Sep_092122.root" );
   // TString outfileName("TMVA_D0bar.root");
   TFile* outputFile = TFile::Open( outfileName, "RECREATE" );

   // Create the factory object. Later you can choose the methods
   // whose performance you'd like to investigate. The factory is
   // the only TMVA object you have to interact with.
   //
   // The first argument is the base of the name of all the
   // weight files in the directory weights/
   //
   // The second argument is the output file for the training results.
   // All TMVA output can be suppressed by removing the "!" (not) in
   // front of the "Silent" argument in the option string
   TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
                                               "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );

   TMVA::DataLoader *dataloader = new TMVA::DataLoader("dataset");
   // If you wish to modify default settings
   // (please check "src/Config.h" to see all available global options)
   //
   //    (TMVA::gConfig().GetVariablePlotting()).fTimesRMS = 8.0;
   //    (TMVA::gConfig().GetIONames()).fWeightFileDir = "myWeightDirectory";

   // Define the input variables that shall be used for the MVA training.
   // Note that you may also use variable expressions, such as: "3*var1/var2*abs(var3)"
   // [all types of expressions that can also be parsed by TTree::Draw( "expression" )]
   dataloader->AddVariable( "outKFP_positive_p", "P_p", "GeV/c", 'F' );
   dataloader->AddVariable( "outKFP_negative_p", "N_p", "GeV/c", 'F' );
   dataloader->AddVariable( "outKFP_KpPm_invm", "KpPm_invm", "GeV/c^{2}", 'F' );
   dataloader->AddVariable( "outKFP_KmPp_invm", "KmPp_invm", "GeV/c^{2}", 'F' );
   //dataloader->AddVariable( "outKFP_D0_DIRA", "DIRA", "DIRA", 'F' );
   //dataloader->AddVariable( "outKFP_D0_IPchi2", "IPchi2", "IPchi2", 'F' );
   //dataloader->AddVariable( "outKFP_D0_pseudorapidity", "pseudorapidity", "#eta", 'F' );
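   // An expression built from the branches above also works, e.g. (illustrative only):
   //    dataloader->AddVariable( "outKFP_positive_p/outKFP_negative_p", "p_ratio", "", 'F' );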

   // You can add so-called "Spectator variables", which are not used in the MVA training,
   // but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
   // input variables, the response values of all trained MVAs, and the spectator variables

   //dataloader->AddSpectator( "",  "", "GeV", 'F' );


   // global event weights per tree (see below for setting event-wise weights)
   Double_t signalWeight     = 50.0; // a large signal weight helps to balance the few signal events against the abundant background
   Double_t backgroundWeight = 1.0;

   // You can add an arbitrary number of signal or background trees.
   // Note that the D0bar tree enters as background but carries the signal weight,
   // presumably because it contains comparably few events relative to the combinatorial background.
   dataloader->AddSignalTree    ( D0_Tree,         signalWeight );
   dataloader->AddBackgroundTree( D0bar_Tree,      signalWeight );
   dataloader->AddBackgroundTree( Background_Tree, backgroundWeight );

   // To give different trees for training and testing, do as follows:
   //
   //     dataloader->AddSignalTree( signalTrainingTree, signalTrainWeight, "Training" );
   //     dataloader->AddSignalTree( signalTestTree,     signalTestWeight,  "Test" );

   // Use the following code instead of the above two or four lines to add signal and background
   // training and test events "by hand".
   // NOTE that in this case one should not give expressions (such as "var1+var2") in the input
   //      variable definition, but simply compute the expression before adding the event
   // ```cpp
   // // --- begin ----------------------------------------------------------
   // std::vector<Double_t> vars( 4 ); // vector has size of number of input variables
   // Float_t  treevars[4], weight;
   //
   // // Signal
   // for (UInt_t ivar=0; ivar<4; ivar++) signalTree->SetBranchAddress( Form( "var%i", ivar+1 ), &(treevars[ivar]) );
   // for (UInt_t i=0; i<signalTree->GetEntries(); i++) {
   //    signalTree->GetEntry(i);
   //    for (UInt_t ivar=0; ivar<4; ivar++) vars[ivar] = treevars[ivar];
   //    // add training and test events; here: first half is training, second is testing
   //    // note that the weight can also be event-wise
   //    if (i < signalTree->GetEntries()/2.0) dataloader->AddSignalTrainingEvent( vars, signalWeight );
   //    else                                  dataloader->AddSignalTestEvent    ( vars, signalWeight );
   // }
   //
   // // Background (has event weights)
   // background->SetBranchAddress( "weight", &weight );
   // for (UInt_t ivar=0; ivar<4; ivar++) background->SetBranchAddress( Form( "var%i", ivar+1 ), &(treevars[ivar]) );
   // for (UInt_t i=0; i<background->GetEntries(); i++) {
   //    background->GetEntry(i);
   //    for (UInt_t ivar=0; ivar<4; ivar++) vars[ivar] = treevars[ivar];
   //    // add training and test events; here: first half is training, second is testing
   //    // note that the weight can also be event-wise
   //    if (i < background->GetEntries()/2) dataloader->AddBackgroundTrainingEvent( vars, backgroundWeight*weight );
   //    else                                dataloader->AddBackgroundTestEvent    ( vars, backgroundWeight*weight );
   // }
   // // --- end ------------------------------------------------------------
   // ```
   // End of tree registration

   // Set individual event weights (the variables must exist in the original TTree)
   // -  for signal    : `dataloader->SetSignalWeightExpression    ("weight1*weight2");`
   // -  for background: `dataloader->SetBackgroundWeightExpression("weight1*weight2");`
   //dataloader->SetBackgroundWeightExpression( "weight" );

   // Apply additional cuts on the signal and background samples (can be different)
   TCut mycuts = ""; // for example: TCut mycuts = "abs(var1)<0.5 && abs(var2-0.5)<1";
   TCut mycutb = ""; // for example: TCut mycutb = "abs(var1)<0.5";
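   // For example, one could train only in a window around the nominal D0 mass of
   // 1.865 GeV/c^2 (the window half-width below is illustrative, not tuned):
   //    TCut mycuts = "abs(outKFP_KpPm_invm - 1.865) < 0.1";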

   // Tell the dataloader how to use the training and testing events
   //
   // If no numbers of events are given, half of the events in the tree are used
   // for training, and the other half for testing:
   //
   //    dataloader->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
   //
   // To also specify the number of testing events, use:
   //
   //    dataloader->PrepareTrainingAndTestTree( mycut,
   //         "NSigTrain=3000:NBkgTrain=3000:NSigTest=3000:NBkgTest=3000:SplitMode=Random:!V" );
   dataloader->PrepareTrainingAndTestTree( mycuts, mycutb,
                                           "nTrain_Signal=1500:nTrain_Background=75000:SplitMode=Random:NormMode=NumEvents:!V" );
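   // Events not reserved for training above are assigned to the test sample;
   // NormMode=NumEvents rescales the weights so that signal and background each
   // average a weight of one per event.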

   // ### Book MVA methods
   //
   // Please look up the various method configuration options in the corresponding cxx files, e.g.
   // src/MethodCuts.cxx, etc., or here: http://tmva.sourceforge.net/optionRef.html
   // It is possible to preset ranges in the option string in which the cut optimisation should be done:
   // "...:CutRangeMin[2]=-1:CutRangeMax[2]=1:...", where [2] is the third input variable

   // Cut optimisation
   if (Use["Cuts"])
      factory->BookMethod( dataloader, TMVA::Types::kCuts, "Cuts",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );

   if (Use["CutsD"])
      factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsD",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );

   if (Use["CutsPCA"])
      factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsPCA",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );

   if (Use["CutsGA"])
      factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsGA",
                           "H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );

   if (Use["CutsSA"])
      factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsSA",
                           "!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );

   // Likelihood ("naive Bayes estimator")
   if (Use["Likelihood"])
      factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "Likelihood",
                           "H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );

   // Decorrelated likelihood
   if (Use["LikelihoodD"])
      factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodD",
                           "!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );

   // PCA-transformed likelihood
   if (Use["LikelihoodPCA"])
      factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodPCA",
                           "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );

   // Use a kernel density estimator to approximate the PDFs
   if (Use["LikelihoodKDE"])
      factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodKDE",
                           "!H:!V:!TransformOutput:PDFInterpol=KDE:KDEtype=Gauss:KDEiter=Adaptive:KDEFineFactor=0.3:KDEborder=None:NAvEvtPerBin=50" );

   // Use a variable-dependent mix of splines and kernel density estimator
   if (Use["LikelihoodMIX"])
      factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodMIX",
                           "!H:!V:!TransformOutput:PDFInterpolSig[0]=KDE:PDFInterpolBkg[0]=KDE:PDFInterpolSig[1]=KDE:PDFInterpolBkg[1]=KDE:PDFInterpolSig[2]=Spline2:PDFInterpolBkg[2]=Spline2:PDFInterpolSig[3]=Spline2:PDFInterpolBkg[3]=Spline2:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50" );

   // Test the multi-dimensional probability density estimator.
   // Here are the option strings for the MinMax and RMS volume-range modes, respectively:
   //
   //      "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3"
   //      "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3"
   if (Use["PDERS"])
      factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERS",
                           "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600" );

   if (Use["PDERSD"])
      factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERSD",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=Decorrelate" );

   if (Use["PDERSPCA"])
      factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERSPCA",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=PCA" );

   // Multi-dimensional likelihood estimator using self-adapting phase-space binning
   if (Use["PDEFoam"])
      factory->BookMethod( dataloader, TMVA::Types::kPDEFoam, "PDEFoam",
                           "!H:!V:SigBgSeparate=F:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T" );

   if (Use["PDEFoamBoost"])
      factory->BookMethod( dataloader, TMVA::Types::kPDEFoam, "PDEFoamBoost",
                           "!H:!V:Boost_Num=30:Boost_Transform=linear:SigBgSeparate=F:MaxDepth=4:UseYesNoCell=T:DTLogic=MisClassificationError:FillFoamWithOrigWeights=F:TailCut=0:nActiveCells=500:nBin=20:Nmin=400:Kernel=None:Compress=T" );

   // K-Nearest Neighbour classifier (KNN)
   if (Use["KNN"])
      factory->BookMethod( dataloader, TMVA::Types::kKNN, "KNN",
                           "H:nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );

   // H-Matrix (chi-squared) method
   if (Use["HMatrix"])
      factory->BookMethod( dataloader, TMVA::Types::kHMatrix, "HMatrix", "!H:!V:VarTransform=None" );

   // Linear discriminant (same as Fisher discriminant)
   if (Use["LD"])
      factory->BookMethod( dataloader, TMVA::Types::kLD, "LD", "H:!V:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );

   // Fisher discriminant (same as LD)
   if (Use["Fisher"])
      factory->BookMethod( dataloader, TMVA::Types::kFisher, "Fisher", "H:!V:Fisher:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );

   // Fisher with Gauss-transformed input variables
   if (Use["FisherG"])
      factory->BookMethod( dataloader, TMVA::Types::kFisher, "FisherG", "H:!V:VarTransform=Gauss" );

   // Composite classifier: ensemble (tree) of boosted Fisher classifiers
   if (Use["BoostedFisher"])
      factory->BookMethod( dataloader, TMVA::Types::kFisher, "BoostedFisher",
                           "H:!V:Boost_Num=20:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=0.2:!Boost_DetailedMonitoring" );

   // Function Discriminant Analysis (FDA): test of various fitters; the recommended one is Minuit (or GA or SA)
   if (Use["FDA_MC"])
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MC",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:SampleSize=100000:Sigma=0.1" );

   if (Use["FDA_GA"]) // can also use the Simulated Annealing (SA) algorithm (see CutsSA options)
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=100:Cycles=2:Steps=5:Trim=True:SaveBestGen=1" );

   if (Use["FDA_SA"]) // uses the Simulated Annealing (SA) algorithm (see CutsSA options)
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_SA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );

   if (Use["FDA_MT"])
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );

   if (Use["FDA_GAMT"])
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GAMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );

   if (Use["FDA_MCMT"])
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MCMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );

   // TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
   if (Use["MLP"])
      factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP", "H:!V:EstimatorType=MSE:NeuronType=sigmoid:VarTransform=N:NCycles=21:HiddenLayers=N-1:TestRate=1:UseRegulator" );

   if (Use["MLPBFGS"])
      factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );

   if (Use["MLPBNN"])
      factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=60:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with Bayesian regulators


   // Multi-architecture DNN implementation
   if (Use["DNN_CPU"] || Use["DNN_GPU"]) {
      // General layout
      TString layoutString ("Layout=TANH|128,TANH|128,TANH|128,LINEAR");

      // Define the training strategy. Multiple strategies may be given, separated by the "|" delimiter
      TString trainingStrategyString = ("TrainingStrategy=LearningRate=1e-2,Momentum=0.9,"
                                        "ConvergenceSteps=20,BatchSize=100,TestRepetitions=1,"
                                        "WeightDecay=1e-4,Regularization=None,"
                                        "DropConfig=0.0+0.5+0.5+0.5");

      // General options
      TString dnnOptions ("!H:V:ErrorStrategy=CROSSENTROPY:VarTransform=N:"
                          "WeightInitialization=XAVIERUNIFORM");
      dnnOptions.Append (":"); dnnOptions.Append (layoutString);
      dnnOptions.Append (":"); dnnOptions.Append (trainingStrategyString);

      // CUDA implementation
      if (Use["DNN_GPU"]) {
         TString gpuOptions = dnnOptions + ":Architecture=GPU";
         factory->BookMethod(dataloader, TMVA::Types::kDL, "DNN_GPU", gpuOptions);
      }
      // Multi-core CPU implementation
      if (Use["DNN_CPU"]) {
         TString cpuOptions = dnnOptions + ":Architecture=CPU";
         factory->BookMethod(dataloader, TMVA::Types::kDL, "DNN_CPU", cpuOptions);
      }
   }
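
   // Note: the GPU and CPU back-ends above are available only if ROOT was built with the
   // corresponding TMVA support (cf. the R__HAS_TMVAGPU / R__HAS_TMVACPU guards at the top).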

   // CF(Clermont-Ferrand)ANN
   if (Use["CFMlpANN"])
      factory->BookMethod( dataloader, TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...

   // Tmlp(Root)ANN
   if (Use["TMlpANN"])
      factory->BookMethod( dataloader, TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...

   // Support Vector Machine
   if (Use["SVM"])
      factory->BookMethod( dataloader, TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );

   // Boosted Decision Trees
   if (Use["BDTG"]) // Gradient Boost
      factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTG",
                           "!H:!V:NTrees=1000:MinNodeSize=2.5%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=2" );

   if (Use["BDT"])  // Adaptive Boost
      factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDT",
                           "!H:!V:NTrees=80:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20:DoBoostMonitor" );

   if (Use["BDTB"]) // Bagging
      factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTB",
                           "!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20" );

   if (Use["BDTD"]) // Decorrelation + Adaptive Boost
      factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTD",
                           "!H:!V:NTrees=400:MinNodeSize=5%:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:VarTransform=Decorrelate" );

   if (Use["BDTF"])  // allow using the Fisher discriminant in node splitting for (strongly) linearly correlated variables
      factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTF",
                           "!H:!V:NTrees=50:MinNodeSize=2.5%:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20" );

   // RuleFit -- TMVA implementation of Friedman's method
   if (Use["RuleFit"])
      factory->BookMethod( dataloader, TMVA::Types::kRuleFit, "RuleFit",
                           "H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );

   // For an example of the category classifier usage, see: TMVAClassificationCategory
   //
   // --------------------------------------------------------------------------------------------------
   // Now you can optimise the settings (configuration) of the MVAs using the set of training events.
   // STILL EXPERIMENTAL and only implemented for BDTs!
   //
   //     factory->OptimizeAllMethods("SigEffAtBkg0.01","Scan");
   //     factory->OptimizeAllMethods("ROCIntegral","FitGA");
   //
   // --------------------------------------------------------------------------------------------------

   // Now you can tell the factory to train, test, and evaluate the MVAs
   //
   // Train MVAs using the set of training events
   factory->TrainAllMethods();

   // Evaluate all MVAs using the set of test events
   factory->TestAllMethods();

   // Evaluate and compare performance of all configured MVAs
   factory->EvaluateAllMethods();

   // --------------------------------------------------------------

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVAClassification is done!" << std::endl;

   delete factory;
   delete dataloader;

   // Launch the GUI for the ROOT macros
   if (!gROOT->IsBatch()) TMVA::TMVAGui( outfileName );

   return 0;
}
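
// To apply the trained classifier to new data later, TMVA::Reader can be used along
// the following lines (a minimal sketch, not part of this macro; it requires
// #include "TMVA/Reader.h", the weight-file path assumes TMVA's default
// "dataset/weights" layout, and the variable names must match those given to
// AddVariable above):
//
//    Float_t p_p, n_p, kppm_invm, kmpp_invm;
//    TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );
//    reader->AddVariable( "outKFP_positive_p", &p_p );
//    reader->AddVariable( "outKFP_negative_p", &n_p );
//    reader->AddVariable( "outKFP_KpPm_invm",  &kppm_invm );
//    reader->AddVariable( "outKFP_KmPp_invm",  &kmpp_invm );
//    reader->BookMVA( "BDT method", "dataset/weights/TMVAClassification_BDT.weights.xml" );
//    // ... fill the four variables for each candidate, then:
//    Double_t mvaValue = reader->EvaluateMVA( "BDT method" );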

int main( int argc, char** argv )
{
   // Select methods (don't look at this code - not of interest)
   TString methodList;
   for (int i=1; i<argc; i++) {
      TString regMethod(argv[i]);
      if (regMethod=="-b" || regMethod=="--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   return TMVA_D0_D0bar(methodList);
}