|
[Rivet-svn] r1793 - in trunk: . include/Rivet include/Rivet/Analyses src/Analyses
From: blackhole at projects.hepforge.org (blackhole at projects.hepforge.org)
Date: Mon Aug 31 15:21:02 BST 2009
Author: buckley Date: Mon Aug 31 15:21:02 2009 New Revision: 1793 Log: All analysis headers merged Deleted: trunk/include/Rivet/Analyses/OPAL_1998_S3780481.hh trunk/include/Rivet/Analyses/OPAL_2004_S6132243.hh trunk/include/Rivet/Analyses/ZEUS_2001_S4815815.hh Modified: trunk/ChangeLog trunk/include/Rivet/Makefile.am trunk/src/Analyses/OPAL_1998_S3780481.cc trunk/src/Analyses/OPAL_2004_S6132243.cc trunk/src/Analyses/ZEUS_2001_S4815815.cc Modified: trunk/ChangeLog ============================================================================== --- trunk/ChangeLog Mon Aug 31 15:12:13 2009 (r1792) +++ trunk/ChangeLog Mon Aug 31 15:21:02 2009 (r1793) @@ -1,12 +1,7 @@ 2009-08-31 Andy Buckley <andy at insectnation.org> - * Removing headers for PDG analyses and MC analyses. - - * Removing headers for STAR analyses. - - * Cleaning and removing headers from UA1 and UA5 analyses. - - * Removing headers for D0 analyses. + * Removing/merging headers for all analyses except for the special + MC_JetAnalysis base class. * Exit with an error message if addProjection is used twice from the same parent with distinct projections. 
Modified: trunk/include/Rivet/Makefile.am ============================================================================== --- trunk/include/Rivet/Makefile.am Mon Aug 31 15:12:13 2009 (r1792) +++ trunk/include/Rivet/Makefile.am Mon Aug 31 15:21:02 2009 (r1793) @@ -30,9 +30,6 @@ ## Standard analyses nobase_dist_noinst_HEADERS += \ - Analyses/OPAL_1998_S3780481.hh \ - Analyses/OPAL_2004_S6132243.hh \ - Analyses/ZEUS_2001_S4815815.hh \ Analyses/MC_JetAnalysis.hh Modified: trunk/src/Analyses/OPAL_1998_S3780481.cc ============================================================================== --- trunk/src/Analyses/OPAL_1998_S3780481.cc Mon Aug 31 15:12:13 2009 (r1792) +++ trunk/src/Analyses/OPAL_1998_S3780481.cc Mon Aug 31 15:21:02 2009 (r1793) @@ -1,7 +1,6 @@ // -*- C++ -*- -#include "Rivet/Rivet.hh" +#include "Rivet/Analysis.hh" #include "Rivet/RivetAIDA.hh" -#include "Rivet/Analyses/OPAL_1998_S3780481.hh" #include "Rivet/Tools/ParticleIDMethods.hh" #include "Rivet/Projections/Beam.hh" #include "Rivet/Projections/FinalState.hh" @@ -11,67 +10,75 @@ namespace Rivet { - // Constructor - OPAL_1998_S3780481::OPAL_1998_S3780481() - : Analysis("OPAL_1998_S3780481") - { - setBeams(ELECTRON, POSITRON); - addProjection(Beam(), "Beams"); - addProjection(ChargedFinalState(), "FS"); - addProjection(InitialQuarks(), "IQF"); - _weightedTotalPartNum = 0; - _SumOfudsWeights = 0; - _SumOfcWeights = 0; - _SumOfbWeights = 0; - } - - - void OPAL_1998_S3780481::analyze(const Event& e) { - // First, veto on leptonic events by requiring at least 4 charged FS particles - const FinalState& fs = applyProjection<FinalState>(e, "FS"); - const size_t numParticles = fs.particles().size(); - - // Even if we only generate hadronic events, we still need a cut on numCharged >= 2. 
- if (numParticles < 2) { - getLog() << Log::DEBUG << "Failed ncharged cut" << endl; - vetoEvent; + /// @brief OPAL flavour dependent fragmentation paper + /// @author Hendrik Hoeth + class OPAL_1998_S3780481 : public Analysis { + public: + + /// Constructor + OPAL_1998_S3780481() + : Analysis("OPAL_1998_S3780481") + { + setBeams(ELECTRON, POSITRON); + addProjection(Beam(), "Beams"); + addProjection(ChargedFinalState(), "FS"); + addProjection(InitialQuarks(), "IQF"); + _weightedTotalPartNum = 0; + _SumOfudsWeights = 0; + _SumOfcWeights = 0; + _SumOfbWeights = 0; } - getLog() << Log::DEBUG << "Passed ncharged cut" << endl; - - // Get event weight for histo filling - const double weight = e.weight(); - _weightedTotalPartNum += numParticles * weight; - - // Get beams and average beam momentum - const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams(); - const double meanBeamMom = ( beams.first.momentum().vector3().mod() + - beams.second.momentum().vector3().mod() ) / 2.0; - getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl; - - int flavour = 0; - const InitialQuarks& iqf = applyProjection<InitialQuarks>(e, "IQF"); - - // If we only have two quarks (qqbar), just take the flavour. - // If we have more than two quarks, look for the highest energetic q-qbar pair. - if (iqf.particles().size() == 2) { - flavour = abs( iqf.particles().front().pdgId() ); - } - else { - std::map<int, double> quarkmap; - foreach (const Particle& p, iqf.particles()) { - if (quarkmap[p.pdgId()] < p.momentum().E()) { - quarkmap[p.pdgId()] = p.momentum().E(); - } + + + /// @name Analysis methods + //@{ + + void analyze(const Event& e) { + // First, veto on leptonic events by requiring at least 4 charged FS particles + const FinalState& fs = applyProjection<FinalState>(e, "FS"); + const size_t numParticles = fs.particles().size(); + + // Even if we only generate hadronic events, we still need a cut on numCharged >= 2. 
+ if (numParticles < 2) { + getLog() << Log::DEBUG << "Failed ncharged cut" << endl; + vetoEvent; + } + getLog() << Log::DEBUG << "Passed ncharged cut" << endl; + + // Get event weight for histo filling + const double weight = e.weight(); + _weightedTotalPartNum += numParticles * weight; + + // Get beams and average beam momentum + const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams(); + const double meanBeamMom = ( beams.first.momentum().vector3().mod() + + beams.second.momentum().vector3().mod() ) / 2.0; + getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl; + + int flavour = 0; + const InitialQuarks& iqf = applyProjection<InitialQuarks>(e, "IQF"); + + // If we only have two quarks (qqbar), just take the flavour. + // If we have more than two quarks, look for the highest energetic q-qbar pair. + if (iqf.particles().size() == 2) { + flavour = abs( iqf.particles().front().pdgId() ); } - double maxenergy = 0.; - for (int i = 1; i <= 5; ++i) { - if (quarkmap[i]+quarkmap[-i] > maxenergy) { - flavour = i; + else { + map<int, double> quarkmap; + foreach (const Particle& p, iqf.particles()) { + if (quarkmap[p.pdgId()] < p.momentum().E()) { + quarkmap[p.pdgId()] = p.momentum().E(); + } + } + double maxenergy = 0.; + for (int i = 1; i <= 5; ++i) { + if (quarkmap[i]+quarkmap[-i] > maxenergy) { + flavour = i; + } } } - } - - switch (flavour) { + + switch (flavour) { case 1: case 2: case 3: @@ -83,16 +90,16 @@ case 5: _SumOfbWeights += weight; break; - } - - foreach (const Particle& p, fs.particles()) { - const double xp = p.momentum().vector3().mod()/meanBeamMom; - const double logxp = -std::log(xp); - _histXpall->fill(xp, weight); - _histLogXpall->fill(logxp, weight); - _histMultiChargedall->fill(_histMultiChargedall->binMean(0), weight); - switch (flavour) { - /// @todo Use PDG code enums + } + + foreach (const Particle& p, fs.particles()) { + const double xp = p.momentum().vector3().mod()/meanBeamMom; + const double logxp = 
-std::log(xp); + _histXpall->fill(xp, weight); + _histLogXpall->fill(logxp, weight); + _histMultiChargedall->fill(_histMultiChargedall->binMean(0), weight); + switch (flavour) { + /// @todo Use PDG code enums case 1: case 2: case 3: @@ -110,49 +117,80 @@ _histLogXpb->fill(logxp, weight); _histMultiChargedb->fill(_histMultiChargedb->binMean(0), weight); break; + } } + + } + + + void init() { + _histXpuds = bookHistogram1D(1, 1, 1); + _histXpc = bookHistogram1D(2, 1, 1); + _histXpb = bookHistogram1D(3, 1, 1); + _histXpall = bookHistogram1D(4, 1, 1); + _histLogXpuds = bookHistogram1D(5, 1, 1); + _histLogXpc = bookHistogram1D(6, 1, 1); + _histLogXpb = bookHistogram1D(7, 1, 1); + _histLogXpall = bookHistogram1D(8, 1, 1); + _histMultiChargeduds = bookHistogram1D(9, 1, 1); + _histMultiChargedc = bookHistogram1D(9, 1, 2); + _histMultiChargedb = bookHistogram1D(9, 1, 3); + _histMultiChargedall = bookHistogram1D(9, 1, 4); } + + + /// Finalize + void finalize() { + const double avgNumParts = _weightedTotalPartNum / sumOfWeights(); + normalize(_histXpuds , avgNumParts); + normalize(_histXpc , avgNumParts); + normalize(_histXpb , avgNumParts); + normalize(_histXpall , avgNumParts); + normalize(_histLogXpuds , avgNumParts); + normalize(_histLogXpc , avgNumParts); + normalize(_histLogXpb , avgNumParts); + normalize(_histLogXpall , avgNumParts); + + scale(_histMultiChargeduds, 1.0/_SumOfudsWeights); + scale(_histMultiChargedc , 1.0/_SumOfcWeights); + scale(_histMultiChargedb , 1.0/_SumOfbWeights); + scale(_histMultiChargedall, 1.0/sumOfWeights()); + } + + //@} - } + private: - void OPAL_1998_S3780481::init() { - _histXpuds = bookHistogram1D(1, 1, 1); - _histXpc = bookHistogram1D(2, 1, 1); - _histXpb = bookHistogram1D(3, 1, 1); - _histXpall = bookHistogram1D(4, 1, 1); - _histLogXpuds = bookHistogram1D(5, 1, 1); - _histLogXpc = bookHistogram1D(6, 1, 1); - _histLogXpb = bookHistogram1D(7, 1, 1); - _histLogXpall = bookHistogram1D(8, 1, 1); - _histMultiChargeduds = bookHistogram1D(9, 
1, 1); - _histMultiChargedc = bookHistogram1D(9, 1, 2); - _histMultiChargedb = bookHistogram1D(9, 1, 3); - _histMultiChargedall = bookHistogram1D(9, 1, 4); - } - - - // Finalize - void OPAL_1998_S3780481::finalize() { - const double avgNumParts = _weightedTotalPartNum / sumOfWeights(); - normalize(_histXpuds , avgNumParts); - normalize(_histXpc , avgNumParts); - normalize(_histXpb , avgNumParts); - normalize(_histXpall , avgNumParts); - normalize(_histLogXpuds , avgNumParts); - normalize(_histLogXpc , avgNumParts); - normalize(_histLogXpb , avgNumParts); - normalize(_histLogXpall , avgNumParts); - - scale(_histMultiChargeduds, 1.0/_SumOfudsWeights); - scale(_histMultiChargedc , 1.0/_SumOfcWeights); - scale(_histMultiChargedb , 1.0/_SumOfbWeights); - scale(_histMultiChargedall, 1.0/sumOfWeights()); - } + /// Store the weighted sums of numbers of charged / charged+neutral + /// particles - used to calculate average number of particles for the + /// inclusive single particle distributions' normalisations. 
+ double _weightedTotalPartNum; + + double _SumOfudsWeights; + double _SumOfcWeights; + double _SumOfbWeights; + + AIDA::IHistogram1D *_histXpuds; + AIDA::IHistogram1D *_histXpc; + AIDA::IHistogram1D *_histXpb; + AIDA::IHistogram1D *_histXpall; + AIDA::IHistogram1D *_histLogXpuds; + AIDA::IHistogram1D *_histLogXpc; + AIDA::IHistogram1D *_histLogXpb; + AIDA::IHistogram1D *_histLogXpall; + AIDA::IHistogram1D *_histMultiChargeduds; + AIDA::IHistogram1D *_histMultiChargedc; + AIDA::IHistogram1D *_histMultiChargedb; + AIDA::IHistogram1D *_histMultiChargedall; + //@} + }; + + // This global object acts as a hook for the plugin system AnalysisBuilder<OPAL_1998_S3780481> plugin_OPAL_1998_S3780481; - + } Modified: trunk/src/Analyses/OPAL_2004_S6132243.cc ============================================================================== --- trunk/src/Analyses/OPAL_2004_S6132243.cc Mon Aug 31 15:12:13 2009 (r1792) +++ trunk/src/Analyses/OPAL_2004_S6132243.cc Mon Aug 31 15:21:02 2009 (r1793) @@ -1,14 +1,19 @@ // -*- C++ -*- -#include "Rivet/Tools/Logging.hh" +#include "Rivet/Analysis.hh" #include "Rivet/RivetAIDA.hh" -#include "Rivet/Analyses/OPAL_2004_S6132243.hh" +#include "Rivet/Tools/Logging.hh" namespace Rivet { - void OPAL_2004_S6132243::init() { } - void OPAL_2004_S6132243::analyze(const Event & event) { } - void OPAL_2004_S6132243::finalize() { } + class OPAL_2004_S6132243 : public Analysis { + + OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243") { } + void init() { } + void analyze(const Event & event) { } + void finalize() { } + + }; // This global object acts as a hook for the plugin system Modified: trunk/src/Analyses/ZEUS_2001_S4815815.cc ============================================================================== --- trunk/src/Analyses/ZEUS_2001_S4815815.cc Mon Aug 31 15:12:13 2009 (r1792) +++ trunk/src/Analyses/ZEUS_2001_S4815815.cc Mon Aug 31 15:21:02 2009 (r1793) @@ -1,52 +1,80 @@ // -*- C++ -*- -#include "Rivet/Tools/Logging.hh" +#include "Rivet/Analysis.hh" 
#include "Rivet/RivetAIDA.hh" -#include "Rivet/Analyses/ZEUS_2001_S4815815.hh" +#include "Rivet/Tools/Logging.hh" #include "Rivet/Projections/FastJets.hh" namespace Rivet { - // Constructor - ZEUS_2001_S4815815::ZEUS_2001_S4815815() - : Analysis("ZEUS_2001_S4815815") - { - setBeams(POSITRON, PROTON); - FinalState fs; - addProjection(fs, "FS"); - /// @todo This is the *wrong* jet def: correct it! - getLog() << Log::WARN << "This analysis uses the wrong jet definition: the " - << "paper just says 'a cone algorithm was applied to the CAL cells and jets " - << "were reconstructed using the energies and positions of these cells'" << endl; - addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets"); - } - - - // Book histograms - void ZEUS_2001_S4815815::init() { - /// @todo This doesn't seem to correspond to the plots in the paper (SPIRES 4730372) - _histJetEt1 = bookHistogram1D("JetET1", 11, 14.0, 75.0); - } - - - // Do the analysis - void ZEUS_2001_S4815815::analyze(const Event& event) { - const FastJets& jets = applyProjection<FastJets>(event, "Jets"); - const size_t nj = jets.size(); - getLog() << Log::INFO << "Jet multiplicity = " << nj << endl; - - // Fill histograms - PseudoJets jetList = jets.pseudoJets(); - for (PseudoJets::const_iterator j = jetList.begin(); j != jetList.end(); ++j) { - _histJetEt1->fill(j->perp(), event.weight() ); + /// @brief ZEUS dijet photoproduction study used in the ZEUS Jets PDF fit + /// + /// This class is a reproduction of the HZTool routine for the ZEUS + /// dijet photoproduction paper which was used in the ZEUS Jets PDF fit. + /// + /// @author Jon Butterworth + class ZEUS_2001_S4815815 : public Analysis { + + public: + + /// Default constructor. + ZEUS_2001_S4815815() + : Analysis("ZEUS_2001_S4815815") + { + setBeams(POSITRON, PROTON); + FinalState fs; + addProjection(fs, "FS"); + /// @todo This is the *wrong* jet def: correct it! 
+ getLog() << Log::WARN << "This analysis uses the wrong jet definition: the " + << "paper just says 'a cone algorithm was applied to the CAL cells and jets " + << "were reconstructed using the energies and positions of these cells'" << endl; + addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets"); + } + + + /// @name Analysis methods + //@{ + + // Book histograms + void init() { + /// @todo This doesn't seem to correspond to the plots in the paper (SPIRES 4730372) + _histJetEt1 = bookHistogram1D("JetET1", 11, 14.0, 75.0); } - } - // Finalize - void ZEUS_2001_S4815815::finalize() { } + // Do the analysis + void analyze(const Event& event) { + const FastJets& jets = applyProjection<FastJets>(event, "Jets"); + const size_t nj = jets.size(); + getLog() << Log::INFO << "Jet multiplicity = " << nj << endl; + + // Fill histograms + PseudoJets jetList = jets.pseudoJets(); + for (PseudoJets::const_iterator j = jetList.begin(); j != jetList.end(); ++j) { + _histJetEt1->fill(j->perp(), event.weight() ); + } + } + + + // Finalize + void finalize() { + // + } + + //@} + + private: + + /// @name Histograms + //@{ + AIDA::IHistogram1D* _histJetEt1; + //@} + + }; + + // This global object acts as a hook for the plugin system AnalysisBuilder<ZEUS_2001_S4815815> plugin_ZEUS_2001_S4815815;
More information about the Rivet-svn mailing list