// Class: ReadCuts
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method         : Cuts::Cuts
TMVA Release   : 4.2.0         [262656]
ROOT Release   : 5.34/38       [336422]
Creator        : vassalli
Date           : Wed Jan 23 17:59:24 2019
Host           : Linux cvmfswrite02.sdcc.bnl.gov 3.10.0-693.11.6.el7.x86_64 #1 SMP Wed Jan 3 18:09:42 CST 2018 x86_64 x86_64 x86_64 GNU/Linux
Dir            : /direct/phenix+u/vassalli/sphenix/single/Training
Training events: 2407
Analysis type  : [Classification]


#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
# Default:
V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VerbosityLevel: "Default" [Verbosity level]
VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "False" [Print method-specific help message]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
FitMethod: "GA" [Minimisation Method (GA, SA, and MC are the primary methods to be used; the others have been introduced for testing purposes and are deprecated)]
EffMethod: "EffSel" [Selection Method]
CutRangeMin[0]: "-1.000000e+00" [Minimum of allowed cut range (set per variable)]
    CutRangeMin[1]: "-1.000000e+00"
    CutRangeMin[2]: "-1.000000e+00"
    CutRangeMin[3]: "-1.000000e+00"
    CutRangeMin[4]: "-1.000000e+00"
    CutRangeMin[5]: "-1.000000e+00"
    CutRangeMin[6]: "-1.000000e+00"
CutRangeMax[0]: "-1.000000e+00" [Maximum of allowed cut range (set per variable)]
    CutRangeMax[1]: "-1.000000e+00"
    CutRangeMax[2]: "-1.000000e+00"
    CutRangeMax[3]: "-1.000000e+00"
    CutRangeMax[4]: "-1.000000e+00"
    CutRangeMax[5]: "-1.000000e+00"
    CutRangeMax[6]: "-1.000000e+00"
VarProp[0]: "NotEnforced" [Categorisation of cuts]
    VarProp[1]: "NotEnforced"
    VarProp[2]: "NotEnforced"
    VarProp[3]: "NotEnforced"
    VarProp[4]: "NotEnforced"
    VarProp[5]: "NotEnforced"
    VarProp[6]: "NotEnforced"
##


#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 7
track_deta                    track_deta                    track_deta                    track_deta                                                      'F'    [1.05425715446e-06,1.83402311802]
track_dlayer                  track_dlayer                  track_dlayer                  track_dlayer                                                    'I'    [0,14]
track_layer                   track_layer                   track_layer                   track_layer                                                     'I'    [0,23]
track_pT                      track_pT                      track_pT                      track_pT                                                        'F'    [0.209833949804,34.1584281921]
vtx_radius                    vtx_radius                    vtx_radius                    vtx_radius                                                      'F'    [0.00339345191605,20.9999389648]
vtxTrack_dist                 vtxTrack_dist                 vtxTrack_dist                 vtxTrack_dist                                                   'F'    [0.0258899498731,10.0588207245]
cluster_prob                  cluster_prob                  cluster_prob                  cluster_prob                                                    'F'    [0,0.999874174595]
NSpec 1
vtx_chi2                      vtx_chi2                      vtx_chi2                      F                                                               'F'    [0,3.33078734987e-36]


============================================================================ */
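
/* Illustrative usage sketch (not part of the generated code) ================

   A standalone reader such as this one is typically constructed with the
   input variable names in exactly the order listed above and then queried
   once per candidate. The variable names "inputVarNames"/"inputValues" and
   the numerical values below are placeholders chosen for illustration, not
   values taken from the training summarised above:

      std::vector<std::string> inputVarNames = {
         "track_deta", "track_dlayer", "track_layer", "track_pT",
         "vtx_radius", "vtxTrack_dist", "cluster_prob" };

      ReadCuts reader( inputVarNames );

      std::vector<double> inputValues = { 0.01, 2, 7, 1.5, 0.3, 0.1, 0.9 }; // placeholder values
      if (reader.IsStatusClean()) {
         double response = reader.GetMvaValue( inputValues ); // classifier response for this candidate
      }

============================================================================ */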

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadCuts : public IClassifierReader {

 public:

   // constructor
   ReadCuts( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadCuts" ),
        fNvars( 7 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "track_deta", "track_dlayer", "track_layer", "track_pT", "vtx_radius", "vtxTrack_dist", "cluster_prob" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = 0;
      fVmax[0] = 0;
      fVmin[1] = 0;
      fVmax[1] = 0;
      fVmin[2] = 0;
      fVmax[2] = 0;
      fVmin[3] = 0;
      fVmax[3] = 0;
      fVmin[4] = 0;
      fVmax[4] = 0;
      fVmin[5] = 0;
      fVmax[5] = 0;
      fVmin[6] = 0;
      fVmax[6] = 0;

      // initialize input variable types
      fType[0] = 'F';
      fType[1] = 'I';
      fType[2] = 'I';
      fType[3] = 'F';
      fType[4] = 'F';
      fType[5] = 'F';
      fType[6] = 'F';

      // initialize constants
      Initialize();

   }

   // destructor
   virtual ~ReadCuts() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[7];
   double fVmax[7];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }
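   // Worked example (illustrative values, assuming xmin = 0 and xmax = 4):
   // NormVariable( 3.0, 0.0, 4.0 ) = 2*(3 - 0)/(4 - 0) - 1 = 0.5,
   // i.e. the midpoint of [xmin, xmax] maps to 0 and the endpoints map to -1 and +1.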

   // type of input variable: 'F' or 'I'
   char   fType[7];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   // not implemented for class: "ReadCuts"
};
   inline double ReadCuts::GetMvaValue( const std::vector<double>& inputValues ) const
   {
      // classifier response value
      double retval = 0;

      // classifier response, sanity check first
      if (!IsStatusClean()) {
         std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                   << " because status is dirty" << std::endl;
         retval = 0;
      }
      else {
         if (IsNormalised()) {
            // normalise variables
            std::vector<double> iV;
            iV.reserve(inputValues.size());
            int ivar = 0;
            for (std::vector<double>::const_iterator varIt = inputValues.begin();
                 varIt != inputValues.end(); varIt++, ivar++) {
               iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
            }
            retval = GetMvaValue__( iV );
         }
         else {
            retval = GetMvaValue__( inputValues );
         }
      }

      return retval;
   }