// Class: ReadPDEFoam
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method         : PDEFoam::PDEFoam
TMVA Release   : 4.2.0         [262656]
ROOT Release   : 5.34/38       [336422]
Creator        : vassalli
Date           : Wed Jan 23 17:51:47 2019
Host           : Linux cvmfswrite02.sdcc.bnl.gov 3.10.0-693.11.6.el7.x86_64 #1 SMP Wed Jan 3 18:09:42 CST 2018 x86_64 x86_64 x86_64 GNU/Linux
Dir            : /direct/phenix+u/vassalli/sphenix/single/Training
Training events: 2407
Analysis type  : [Classification]


#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
SigBgSeparate: "True" [Separate foams for signal and background]
VolFrac: "5.880000e-02" [Size of sampling box, used for density calculation during foam build-up (maximum value: 1.0 is equivalent to volume of entire foam)]
# Default:
V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VerbosityLevel: "Default" [Verbosity level]
VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "False" [Print method-specific help message]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
TailCut: "1.000000e-03" [Fraction of outlier events that are excluded from the foam in each dimension]
nActiveCells: "500" [Maximum number of active cells to be created by the foam]
nSampl: "2000" [Number of generated MC events per cell]
nBin: "5" [Number of bins in edge histograms]
Compress: "True" [Compress foam output file]
MultiTargetRegression: "False" [Do regression with multiple targets]
Nmin: "100" [Number of events in cell required to split cell]
MaxDepth: "0" [Maximum depth of cell tree (0=unlimited)]
FillFoamWithOrigWeights: "False" [Fill foam with original or boost weights]
UseYesNoCell: "False" [Return -1 or 1 for bkg or signal like events]
DTLogic: "None" [Use decision tree algorithm to split cells]
Kernel: "None" [Kernel type used]
TargetSelection: "Mean" [Target selection method]
##


#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 10
track_deta                    track_deta                    track_deta                    track_deta                                                      'F'    [1.05425715446e-06,1.83402311802]
track_dlayer                  track_dlayer                  track_dlayer                  track_dlayer                                                    'I'    [0,14]
track_layer                   track_layer                   track_layer                   track_layer                                                     'I'    [0,23]
track_pT                      track_pT                      track_pT                      track_pT                                                        'F'    [0.209833949804,34.1584281921]
approach_dist                 approach_dist                 approach_dist                 approach_dist                                                   'F'    [1.69032391568e-05,12.8133029938]
vtx_radius                    vtx_radius                    vtx_radius                    vtx_radius                                                      'F'    [0.00339345191605,20.9999389648]
vtxTrack_dist                 vtxTrack_dist                 vtxTrack_dist                 vtxTrack_dist                                                   'F'    [0.0258899498731,10.0588207245]
photon_m                      photon_m                      photon_m                      photon_m                                                        'F'    [1.044480443,713.936157227]
photon_pT                     photon_pT                     photon_pT                     photon_pT                                                       'F'    [0.061235960573,5008.76708984]
cluster_prob                  cluster_prob                  cluster_prob                  cluster_prob                                                    'F'    [0,0.999874174595]
NSpec 1
vtx_chi2                      vtx_chi2                      vtx_chi2                      F                                                               'F'    [0,3.33078734987e-36]


============================================================================ */
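
/* For reference, a minimal sketch (not part of the generated file) of how a
   PDEFoam method with the options recorded above is typically booked in
   TMVA 4.2 / ROOT 5.34. The factory name, output file, and the abbreviated
   option string are illustrative assumptions, not taken from this training:

      TMVA::Factory factory("TMVAClassification", outputFile, "AnalysisType=Classification");
      factory.AddVariable("track_deta",   'F');
      factory.AddVariable("track_dlayer", 'I');
      // ... the remaining eight variables, in the order of the #VAR block ...
      factory.AddSpectator("vtx_chi2",    'F');
      factory.BookMethod(TMVA::Types::kPDEFoam, "PDEFoam",
                         "SigBgSeparate=True:VolFrac=0.0588");

   Only SigBgSeparate and VolFrac were set by the user; the remaining options
   kept their defaults, as the #OPT block records. */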

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadPDEFoam : public IClassifierReader {

 public:

   // constructor
   ReadPDEFoam( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadPDEFoam" ),
        fNvars( 10 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "track_deta", "track_dlayer", "track_layer", "track_pT", "approach_dist", "vtx_radius", "vtxTrack_dist", "photon_m", "photon_pT", "cluster_prob" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = 0;
      fVmax[0] = 0;
      fVmin[1] = 0;
      fVmax[1] = 0;
      fVmin[2] = 0;
      fVmax[2] = 0;
      fVmin[3] = 0;
      fVmax[3] = 0;
      fVmin[4] = 0;
      fVmax[4] = 0;
      fVmin[5] = 0;
      fVmax[5] = 0;
      fVmin[6] = 0;
      fVmax[6] = 0;
      fVmin[7] = 0;
      fVmax[7] = 0;
      fVmin[8] = 0;
      fVmax[8] = 0;
      fVmin[9] = 0;
      fVmax[9] = 0;

      // initialize input variable types
      fType[0] = 'F';
      fType[1] = 'I';
      fType[2] = 'I';
      fType[3] = 'F';
      fType[4] = 'F';
      fType[5] = 'F';
      fType[6] = 'F';
      fType[7] = 'F';
      fType[8] = 'F';
      fType[9] = 'F';

      // initialize constants
      Initialize();

   }

   // destructor
   virtual ~ReadPDEFoam() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[10];
   double fVmax[10];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }
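   // Note: NormVariable maps [xmin, xmax] linearly onto [-1, 1]; e.g. with
   // xmin = 0 and xmax = 20, an input of x = 5 gives 2*5/20 - 1 = -0.5.
   // In this generated class fIsNormalised is false and all fVmin/fVmax are 0,
   // so the normalisation branch in GetMvaValue below is never taken.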

   // type of input variable: 'F' or 'I'
   char   fType[10];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
};

inline double ReadPDEFoam::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         retval = GetMvaValue__( iV );
      }
      else {
         retval = GetMvaValue__( inputValues );
      }
   }

   return retval;
}
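
/* A minimal usage sketch (not part of the generated file) for this standalone
   reader; the order of names and values must match the training variables
   listed in the constructor above:

      std::vector<std::string> names;
      names.push_back("track_deta");   names.push_back("track_dlayer");
      // ... the remaining eight names, in the order listed above ...
      ReadPDEFoam reader(names);

      std::vector<double> values(10);  // filled per candidate, same order as names
      double mva = reader.IsStatusClean() ? reader.GetMvaValue(values) : 0.;

   The constructor checks that the supplied names match the training variables,
   and GetMvaValue refuses to evaluate if that check failed. */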