[Rivet-svn] r1792 - in trunk: include/Rivet include/Rivet/Analyses src/Analyses

blackhole at projects.hepforge.org
Mon Aug 31 15:12:13 BST 2009


Author: buckley
Date: Mon Aug 31 15:12:13 2009
New Revision: 1792

Log:
Merging/removing headers of DELPHI and H1 analyses
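
Each analysis now lives in a single .cc file: the class definition moves
from the deleted header into the Rivet namespace of the source file, and
the analysis registers itself through the AnalysisBuilder hook at the
bottom of each file. A minimal sketch of the resulting pattern (the
analysis name here is a placeholder):

  // MY_ANALYSIS.cc: no matching .hh file needed any more
  #include "Rivet/Analysis.hh"

  namespace Rivet {

    class MY_ANALYSIS : public Analysis {
    public:
      MY_ANALYSIS() : Analysis("MY_ANALYSIS") { /* declare projections */ }
      void init()                  { /* book histograms */ }
      void analyze(const Event& e) { /* apply projections, fill histograms */ }
      void finalize()              { /* normalise histograms */ }
    };

    // This global object acts as a hook for the plugin system
    AnalysisBuilder<MY_ANALYSIS> plugin_MY_ANALYSIS;

  }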

Deleted:
   trunk/include/Rivet/Analyses/DELPHI_1995_S3137023.hh
   trunk/include/Rivet/Analyses/DELPHI_1996_S3430090.hh
   trunk/include/Rivet/Analyses/DELPHI_2002_069_CONF_603.hh
   trunk/include/Rivet/Analyses/DELPHI_2003_WUD_03_11.hh
   trunk/include/Rivet/Analyses/H1_1994_S2919893.hh
   trunk/include/Rivet/Analyses/H1_1995_S3167097.hh
   trunk/include/Rivet/Analyses/H1_2000_S4129130.hh
Modified:
   trunk/include/Rivet/Makefile.am
   trunk/src/Analyses/DELPHI_1995_S3137023.cc
   trunk/src/Analyses/DELPHI_1996_S3430090.cc
   trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc
   trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc
   trunk/src/Analyses/H1_1994_S2919893.cc
   trunk/src/Analyses/H1_1995_S3167097.cc
   trunk/src/Analyses/H1_2000_S4129130.cc

Modified: trunk/include/Rivet/Makefile.am
==============================================================================
--- trunk/include/Rivet/Makefile.am	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/include/Rivet/Makefile.am	Mon Aug 31 15:12:13 2009	(r1792)
@@ -30,13 +30,6 @@
 
 ## Standard analyses
 nobase_dist_noinst_HEADERS += \
-  Analyses/DELPHI_1995_S3137023.hh \
-  Analyses/DELPHI_1996_S3430090.hh \
-  Analyses/DELPHI_2002_069_CONF_603.hh \
-  Analyses/DELPHI_2003_WUD_03_11.hh \
-  Analyses/H1_1994_S2919893.hh \
-  Analyses/H1_1995_S3167097.hh \
-  Analyses/H1_2000_S4129130.hh \
   Analyses/OPAL_1998_S3780481.hh \
   Analyses/OPAL_2004_S6132243.hh \
   Analyses/ZEUS_2001_S4815815.hh \

Modified: trunk/src/Analyses/DELPHI_1995_S3137023.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_1995_S3137023.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/DELPHI_1995_S3137023.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,7 +1,6 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/DELPHI_1995_S3137023.hh"
 #include "Rivet/Tools/ParticleIDMethods.hh"
 #include "Rivet/Projections/Beam.hh"
 #include "Rivet/Projections/FinalState.hh"
@@ -11,75 +10,100 @@
 namespace Rivet {
 
 
-  // Constructor
-  DELPHI_1995_S3137023::DELPHI_1995_S3137023() 
-    : Analysis("DELPHI_1995_S3137023")
-  {
-    setBeams(ELECTRON, POSITRON); 
-    addProjection(Beam(), "Beams");
-    addProjection(ChargedFinalState(), "FS");
-    addProjection(UnstableFinalState(), "UFS");
-    _weightedTotalNumXiMinus = 0;
-    _weightedTotalNumSigma1385Plus = 0;
-  }
-
-
-  void DELPHI_1995_S3137023::analyze(const Event& e) {
-    // First, veto on leptonic events by requiring at least 4 charged FS particles
-    const FinalState& fs = applyProjection<FinalState>(e, "FS");
-    const size_t numParticles = fs.particles().size();
-
-    // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
-    if (numParticles < 2) {
-      getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
-      vetoEvent;
+  /// @brief DELPHI strange baryon paper
+  /// @author Hendrik Hoeth
+  class DELPHI_1995_S3137023 : public Analysis {
+  public:
+
+    /// Constructor
+    DELPHI_1995_S3137023() 
+      : Analysis("DELPHI_1995_S3137023")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      addProjection(ChargedFinalState(), "FS");
+      addProjection(UnstableFinalState(), "UFS");
+      _weightedTotalNumXiMinus = 0;
+      _weightedTotalNumSigma1385Plus = 0;
     }
-    getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
 
-    // Get event weight for histo filling
-    const double weight = e.weight();
-
-    // Get beams and average beam momentum
-    const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-    const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
-                                 beams.second.momentum().vector3().mod() ) / 2.0;
-    getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-
-    // Final state of unstable particles to get particle spectra
-    const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
-
-    for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
-      int id = abs(p->pdgId());
-      switch (id) {
-         case 3312:
-            _histXpXiMinus->fill(p->momentum().vector3().mod()/meanBeamMom, weight);
-            _weightedTotalNumXiMinus += weight;
-            break;
-         case 3114:
-            _histXpSigma1385Plus->fill(p->momentum().vector3().mod()/meanBeamMom, weight);
-            _weightedTotalNumSigma1385Plus += weight;
-            break;
-      }
+    
+    /// @name Analysis methods
+    //@{
+
+    void init() {
+      _histXpXiMinus       = bookHistogram1D(2, 1, 1);
+      _histXpSigma1385Plus = bookHistogram1D(3, 1, 1);
     }
 
-  }
-
-
-
-  void DELPHI_1995_S3137023::init() {
-    _histXpXiMinus       = bookHistogram1D(2, 1, 1);
-    _histXpSigma1385Plus = bookHistogram1D(3, 1, 1);
-  }
-
-  // Finalize
-  void DELPHI_1995_S3137023::finalize() { 
-    normalize(_histXpXiMinus       , _weightedTotalNumXiMinus/sumOfWeights());
-    normalize(_histXpSigma1385Plus , _weightedTotalNumSigma1385Plus/sumOfWeights());
-  }
 
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by cutting on the charged FS multiplicity
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      // Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+      
+      foreach (const Particle& p, ufs.particles()) {
+        const int id = abs(p.pdgId());
+        switch (id) {
+        case 3312:
+          _histXpXiMinus->fill(p.momentum().vector3().mod()/meanBeamMom, weight);
+          _weightedTotalNumXiMinus += weight;
+          break;
+        case 3114:
+          _histXpSigma1385Plus->fill(p.momentum().vector3().mod()/meanBeamMom, weight);
+          _weightedTotalNumSigma1385Plus += weight;
+          break;
+        }
+      }
+      
+    }
+        
+   
+    /// Finalize
+    void finalize() { 
+      normalize(_histXpXiMinus       , _weightedTotalNumXiMinus/sumOfWeights());
+      normalize(_histXpSigma1385Plus , _weightedTotalNumSigma1385Plus/sumOfWeights());
+    }
+    
+    //@}
 
 
+  private:
+    
+    /// Weighted counts of Xi- and Sigma(1385)+ particles, used to
+    /// normalise the inclusive single-particle distributions.
+    double _weightedTotalNumXiMinus;
+    double _weightedTotalNumSigma1385Plus;
+    
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histXpXiMinus;
+    AIDA::IHistogram1D *_histXpSigma1385Plus;
+    //@}
+    
+  };
+  
+  
+  
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<DELPHI_1995_S3137023> plugin_DELPHI_1995_S3137023;
-
+  
 }

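The plugin_* global at the end of the file is what makes each merged
analysis self-contained: constructing a static AnalysisBuilder<T> when
the library is loaded registers a factory for T with the framework.
Rivet's actual AnalysisBuilder is defined elsewhere in the framework;
purely to illustrate the self-registration idiom it relies on, a sketch
(the registry type and all names below are hypothetical, not Rivet's
real API) might look like:

  #include <map>
  #include <string>

  typedef Analysis* (*AnalysisFactory)();

  // Hypothetical global registry mapping analysis names to factories
  inline std::map<std::string, AnalysisFactory>& analysisRegistry() {
    static std::map<std::string, AnalysisFactory> registry;
    return registry;
  }

  template <typename T>
  struct ExampleAnalysisBuilder {
    static Analysis* create() { return new T(); }
    ExampleAnalysisBuilder() {
      // Runs at static-initialisation time, i.e. when the plugin loads
      analysisRegistry()[T().name()] = &create;
    }
  };
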
Modified: trunk/src/Analyses/DELPHI_1996_S3430090.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_1996_S3430090.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/DELPHI_1996_S3430090.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,7 +1,6 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/DELPHI_1996_S3430090.hh"
 #include "Rivet/Tools/ParticleIDMethods.hh"
 #include "Rivet/Projections/Beam.hh"
 #include "Rivet/Projections/Sphericity.hh"
@@ -16,404 +15,516 @@
 namespace Rivet {
 
 
-  DELPHI_1996_S3430090::DELPHI_1996_S3430090() 
-    : Analysis("DELPHI_1996_S3430090")
-  {
-    setBeams(ELECTRON, POSITRON); 
-    addProjection(Beam(), "Beams");
-    const ChargedFinalState cfs;
-    addProjection(cfs, "FS");
-    addProjection(UnstableFinalState(), "UFS");
-    addProjection(FastJets(cfs, FastJets::JADE, 0.7), "JadeJets");
-    addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
-    addProjection(Sphericity(cfs), "Sphericity");
-    addProjection(ParisiTensor(cfs), "Parisi");
-    const Thrust thrust(cfs);
-    addProjection(thrust, "Thrust");
-    addProjection(Hemispheres(thrust), "Hemispheres");
-    _weightedTotalPartNum = 0;
-    _passedCutWeightSum = 0;
-  }
-
-
-
-  void DELPHI_1996_S3430090::analyze(const Event& e) {
-    // First, veto on leptonic events by requiring at least 4 charged FS particles
-    const FinalState& fs = applyProjection<FinalState>(e, "FS");
-    const size_t numParticles = fs.particles().size();
-
-    // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
-    if (numParticles < 2) {
-      getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
-      vetoEvent;
-    }
-    getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
-    const double weight = e.weight();
-    _passedCutWeightSum += weight;
-    _weightedTotalPartNum += numParticles * weight;
-
-    // Get beams and average beam momentum
-    const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-    const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
-                                 beams.second.momentum().vector3().mod() ) / 2.0;
-    getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-
-    // Thrusts
-    getLog() << Log::DEBUG << "Calculating thrust" << endl;
-    const Thrust& thrust = applyProjection<Thrust>(e, "Thrust");
-    _hist1MinusT->fill(1 - thrust.thrust(), weight); 
-    _histTMajor->fill(thrust.thrustMajor(), weight); 
-    _histTMinor->fill(thrust.thrustMinor(), weight); 
-    _histOblateness->fill(thrust.oblateness(), weight);
-
-    // Jets
-    const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
-    if (durjet.clusterSeq()) {
-      _histDiffRate2Durham->fill(durjet.clusterSeq()->exclusive_ymerge(2), weight); 
-      _histDiffRate3Durham->fill(durjet.clusterSeq()->exclusive_ymerge(3), weight); 
-      _histDiffRate4Durham->fill(durjet.clusterSeq()->exclusive_ymerge(4), weight); 
-    }
-    const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
-    if (jadejet.clusterSeq()) {
-      _histDiffRate2Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(2), weight); 
-      _histDiffRate3Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(3), weight); 
-      _histDiffRate4Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(4), weight); 
-    }
-
-    // Sphericities
-    getLog() << Log::DEBUG << "Calculating sphericity" << endl;
-    const Sphericity& sphericity = applyProjection<Sphericity>(e, "Sphericity");
-    _histSphericity->fill(sphericity.sphericity(), weight); 
-    _histAplanarity->fill(sphericity.aplanarity(), weight); 
-    _histPlanarity->fill(sphericity.planarity(), weight); 
-
-    // C & D params
-    getLog() << Log::DEBUG << "Calculating Parisi params" << endl;
-    const ParisiTensor& parisi = applyProjection<ParisiTensor>(e, "Parisi");
-    _histCParam->fill(parisi.C(), weight);
-    _histDParam->fill(parisi.D(), weight);
-
-    // Hemispheres
-    getLog() << Log::DEBUG << "Calculating hemisphere variables" << endl;
-    const Hemispheres& hemi = applyProjection<Hemispheres>(e, "Hemispheres");
-    _histHemiMassH->fill(hemi.getScaledM2high(), weight); 
-    _histHemiMassL->fill(hemi.getScaledM2low(), weight); 
-    _histHemiMassD->fill(hemi.getScaledM2diff(), weight); 
-    _histHemiBroadW->fill(hemi.getBmax(), weight); 
-    _histHemiBroadN->fill(hemi.getBmin(), weight); 
-    _histHemiBroadT->fill(hemi.getBsum(), weight); 
-    _histHemiBroadD->fill(hemi.getBdiff(), weight); 
-
-    // Iterate over all the charged final state particles.
-    double Evis = 0.0;
-    double Evis2 = 0.0;
-    getLog() << Log::DEBUG << "About to iterate over charged FS particles" << endl;
-    for (ParticleVector::const_iterator p = fs.particles().begin(); p != fs.particles().end(); ++p) {
-      // Get momentum and energy of each particle.
-      const Vector3 mom3 = p->momentum().vector3();
-      const double energy = p->momentum().E();
-      Evis += energy;
-
-      // Scaled momenta.
-      const double mom = mom3.mod();
-      const double scaledMom = mom/meanBeamMom;
-      const double logInvScaledMom = -std::log(scaledMom);
-      _histLogScaledMom->fill(logInvScaledMom, weight); 
-      _histScaledMom->fill(scaledMom, weight); 
-
-      // Get momenta components w.r.t. thrust and sphericity.
-      const double momT = dot(thrust.thrustAxis(), mom3);
-      const double momS = dot(sphericity.sphericityAxis(), mom3);
-      const double pTinT = dot(mom3, thrust.thrustMajorAxis());
-      const double pToutT = dot(mom3, thrust.thrustMinorAxis());
-      const double pTinS = dot(mom3, sphericity.sphericityMajorAxis());
-      const double pToutS = dot(mom3, sphericity.sphericityMinorAxis());
-      const double pT = sqrt(pow(pTinT, 2) + pow(pToutT, 2));
-      _histPtTIn->fill(fabs(pTinT/GeV), weight);
-      _histPtTOut->fill(fabs(pToutT/GeV), weight);
-      _histPtSIn->fill(fabs(pTinS/GeV), weight);
-      _histPtSOut->fill(fabs(pToutS/GeV), weight);
-      _histPtVsXp->fill(scaledMom, fabs(pT/GeV), weight);
-      _histPtTOutVsXp->fill(scaledMom, fabs(pToutT/GeV), weight);
-
-      // Calculate rapidities w.r.t. thrust and sphericity.
-      const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
-      const double rapidityS = 0.5 * std::log((energy + momS) / (energy - momS));
-      _histRapidityT->fill(rapidityT, weight); 
-      _histRapidityS->fill(rapidityS, weight); 
+  /**
+   * @brief DELPHI event shapes and identified particle spectra
+   * @author Andy Buckley
+   * @author Hendrik Hoeth
+   *
+   * This is the paper which was used for the original PROFESSOR MC tuning
+   * study. It studies a wide range of e+ e- event-shape variables and
+   * differential jet rates in the Durham and JADE schemes, and also
+   * incorporates identified-particle spectra from other LEP analyses.
+   *
+   *
+   * @par Run conditions
+   *
+   * @arg LEP1 beam energy: \f$ \sqrt{s} = \f$ 91.2 GeV
+   * @arg Run with generic QCD events.
+   * @arg No \f$ p_\perp^\text{min} \f$ cutoff is required
+   */
+  class DELPHI_1996_S3430090 : public Analysis {
+  public:
+  
+    /// Constructor
+    DELPHI_1996_S3430090() 
+      : Analysis("DELPHI_1996_S3430090")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+      addProjection(UnstableFinalState(), "UFS");
+      addProjection(FastJets(cfs, FastJets::JADE, 0.7), "JadeJets");
+      addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
+      addProjection(Sphericity(cfs), "Sphericity");
+      addProjection(ParisiTensor(cfs), "Parisi");
+      const Thrust thrust(cfs);
+      addProjection(thrust, "Thrust");
+      addProjection(Hemispheres(thrust), "Hemispheres");
+      _weightedTotalPartNum = 0;
+      _passedCutWeightSum = 0;
     }
-    Evis2 = Evis*Evis;
+    
+    
+    /// @name Analysis methods
+    //@{
 
-    for (ParticleVector::const_iterator p_i = fs.particles().begin(); p_i != fs.particles().end(); ++p_i) {
-      for (ParticleVector::const_iterator p_j = p_i; p_j != fs.particles().end(); ++p_j) {
-        if (p_i == p_j) continue;
-        const Vector3 mom3_i = p_i->momentum().vector3();
-        const Vector3 mom3_j = p_j->momentum().vector3();
-        const double energy_i = p_i->momentum().E();
-        const double energy_j = p_j->momentum().E();
-        const double cosij = dot(mom3_i.unit(), mom3_j.unit());
-        const double eec = (energy_i*energy_j) / Evis2;
-        _histEEC->fill(cosij, eec*weight);
-        _histAEEC->fill( cosij,  eec*weight);
-        _histAEEC->fill(-cosij, -eec*weight);
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by cutting on the charged FS multiplicity
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+      const double weight = e.weight();
+      _passedCutWeightSum += weight;
+      _weightedTotalPartNum += numParticles * weight;
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      // Thrusts
+      getLog() << Log::DEBUG << "Calculating thrust" << endl;
+      const Thrust& thrust = applyProjection<Thrust>(e, "Thrust");
+      _hist1MinusT->fill(1 - thrust.thrust(), weight); 
+      _histTMajor->fill(thrust.thrustMajor(), weight); 
+      _histTMinor->fill(thrust.thrustMinor(), weight); 
+      _histOblateness->fill(thrust.oblateness(), weight);
+      
+      // Jets
+      const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
+      if (durjet.clusterSeq()) {
+        _histDiffRate2Durham->fill(durjet.clusterSeq()->exclusive_ymerge(2), weight); 
+        _histDiffRate3Durham->fill(durjet.clusterSeq()->exclusive_ymerge(3), weight); 
+        _histDiffRate4Durham->fill(durjet.clusterSeq()->exclusive_ymerge(4), weight); 
+      }
+      const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
+      if (jadejet.clusterSeq()) {
+        _histDiffRate2Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(2), weight); 
+        _histDiffRate3Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(3), weight); 
+        _histDiffRate4Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(4), weight); 
+      }
+      
+      // Sphericities
+      getLog() << Log::DEBUG << "Calculating sphericity" << endl;
+      const Sphericity& sphericity = applyProjection<Sphericity>(e, "Sphericity");
+      _histSphericity->fill(sphericity.sphericity(), weight); 
+      _histAplanarity->fill(sphericity.aplanarity(), weight); 
+      _histPlanarity->fill(sphericity.planarity(), weight); 
+      
+      // C & D params
+      getLog() << Log::DEBUG << "Calculating Parisi params" << endl;
+      const ParisiTensor& parisi = applyProjection<ParisiTensor>(e, "Parisi");
+      _histCParam->fill(parisi.C(), weight);
+      _histDParam->fill(parisi.D(), weight);
+      
+      // Hemispheres
+      getLog() << Log::DEBUG << "Calculating hemisphere variables" << endl;
+      const Hemispheres& hemi = applyProjection<Hemispheres>(e, "Hemispheres");
+      _histHemiMassH->fill(hemi.getScaledM2high(), weight); 
+      _histHemiMassL->fill(hemi.getScaledM2low(), weight); 
+      _histHemiMassD->fill(hemi.getScaledM2diff(), weight); 
+      _histHemiBroadW->fill(hemi.getBmax(), weight); 
+      _histHemiBroadN->fill(hemi.getBmin(), weight); 
+      _histHemiBroadT->fill(hemi.getBsum(), weight); 
+      _histHemiBroadD->fill(hemi.getBdiff(), weight); 
+      
+      // Iterate over all the charged final state particles.
+      double Evis = 0.0;
+      double Evis2 = 0.0;
+      getLog() << Log::DEBUG << "About to iterate over charged FS particles" << endl;
+      for (ParticleVector::const_iterator p = fs.particles().begin(); p != fs.particles().end(); ++p) {
+        // Get momentum and energy of each particle.
+        const Vector3 mom3 = p->momentum().vector3();
+        const double energy = p->momentum().E();
+        Evis += energy;
+        
+        // Scaled momenta.
+        const double mom = mom3.mod();
+        const double scaledMom = mom/meanBeamMom;
+        const double logInvScaledMom = -std::log(scaledMom);
+        _histLogScaledMom->fill(logInvScaledMom, weight); 
+        _histScaledMom->fill(scaledMom, weight); 
+        
+        // Get momenta components w.r.t. thrust and sphericity.
+        const double momT = dot(thrust.thrustAxis(), mom3);
+        const double momS = dot(sphericity.sphericityAxis(), mom3);
+        const double pTinT = dot(mom3, thrust.thrustMajorAxis());
+        const double pToutT = dot(mom3, thrust.thrustMinorAxis());
+        const double pTinS = dot(mom3, sphericity.sphericityMajorAxis());
+        const double pToutS = dot(mom3, sphericity.sphericityMinorAxis());
+        const double pT = sqrt(pow(pTinT, 2) + pow(pToutT, 2));
+        _histPtTIn->fill(fabs(pTinT/GeV), weight);
+        _histPtTOut->fill(fabs(pToutT/GeV), weight);
+        _histPtSIn->fill(fabs(pTinS/GeV), weight);
+        _histPtSOut->fill(fabs(pToutS/GeV), weight);
+        _histPtVsXp->fill(scaledMom, fabs(pT/GeV), weight);
+        _histPtTOutVsXp->fill(scaledMom, fabs(pToutT/GeV), weight);
+        
+        // Calculate rapidities w.r.t. thrust and sphericity.
+        const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
+        const double rapidityS = 0.5 * std::log((energy + momS) / (energy - momS));
+        _histRapidityT->fill(rapidityT, weight); 
+        _histRapidityS->fill(rapidityS, weight); 
+      }
+      Evis2 = Evis*Evis;
+      
+      for (ParticleVector::const_iterator p_i = fs.particles().begin(); p_i != fs.particles().end(); ++p_i) {
+        for (ParticleVector::const_iterator p_j = p_i; p_j != fs.particles().end(); ++p_j) {
+          if (p_i == p_j) continue;
+          const Vector3 mom3_i = p_i->momentum().vector3();
+          const Vector3 mom3_j = p_j->momentum().vector3();
+          const double energy_i = p_i->momentum().E();
+          const double energy_j = p_j->momentum().E();
+          const double cosij = dot(mom3_i.unit(), mom3_j.unit());
+          const double eec = (energy_i*energy_j) / Evis2;
+          _histEEC->fill(cosij, eec*weight);
+          _histAEEC->fill( cosij,  eec*weight);
+          _histAEEC->fill(-cosij, -eec*weight);
+        }
+      }
+      
+      _histMultiCharged->fill(_histMultiCharged->binMean(0), numParticles*weight);
+      
+      
+      // Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+      
+      foreach (const Particle& p, ufs.particles()) {
+        int id = abs(p.pdgId());
+        switch (id) {
+        case 211:
+          _histMultiPiPlus->fill(_histMultiPiPlus->binMean(0), weight);
+          break;
+        case 111:
+          _histMultiPi0->fill(_histMultiPi0->binMean(0), weight);
+          break;
+        case 321:
+          _histMultiKPlus->fill(_histMultiKPlus->binMean(0), weight);
+          break;
+        case 130:
+        case 310:
+          _histMultiK0->fill(_histMultiK0->binMean(0), weight);
+          break;
+        case 221:
+          _histMultiEta->fill(_histMultiEta->binMean(0), weight);
+          break;
+        case 331:
+          _histMultiEtaPrime->fill(_histMultiEtaPrime->binMean(0), weight);
+          break;
+        case 411:
+          _histMultiDPlus->fill(_histMultiDPlus->binMean(0), weight);
+          break;
+        case 421:
+          _histMultiD0->fill(_histMultiD0->binMean(0), weight);
+          break;
+        case 511:
+        case 521:
+        case 531:
+          _histMultiBPlus0->fill(_histMultiBPlus0->binMean(0), weight);
+          break;
+        case 9010221:
+          _histMultiF0->fill(_histMultiF0->binMean(0), weight);
+          break;
+        case 113:
+          _histMultiRho->fill(_histMultiRho->binMean(0), weight);
+          break;
+        case 323:
+          _histMultiKStar892Plus->fill(_histMultiKStar892Plus->binMean(0), weight);
+          break;
+        case 313:
+          _histMultiKStar892_0->fill(_histMultiKStar892_0->binMean(0), weight);
+          break;
+        case 333:
+          _histMultiPhi->fill(_histMultiPhi->binMean(0), weight);
+          break;
+        case 413:
+          _histMultiDStar2010Plus->fill(_histMultiDStar2010Plus->binMean(0), weight);
+          break;
+        case 225:
+          _histMultiF2->fill(_histMultiF2->binMean(0), weight);
+          break;
+        case 315:
+          _histMultiK2Star1430_0->fill(_histMultiK2Star1430_0->binMean(0), weight);
+          break;
+        case 2212:
+          _histMultiP->fill(_histMultiP->binMean(0), weight);
+          break;
+        case 3122:
+          _histMultiLambda0->fill(_histMultiLambda0->binMean(0), weight);
+          break;
+        case 3312:
+          _histMultiXiMinus->fill(_histMultiXiMinus->binMean(0), weight);
+          break;
+        case 3334:
+          _histMultiOmegaMinus->fill(_histMultiOmegaMinus->binMean(0), weight);
+          break;
+        case 2224:
+          _histMultiDeltaPlusPlus->fill(_histMultiDeltaPlusPlus->binMean(0), weight);
+          break;
+        case 3114:
+          _histMultiSigma1385Plus->fill(_histMultiSigma1385Plus->binMean(0), weight);
+          break;
+        case 3324:
+          _histMultiXi1530_0->fill(_histMultiXi1530_0->binMean(0), weight);
+          break;
+        case 5122:
+          _histMultiLambdaB0->fill(_histMultiLambdaB0->binMean(0), weight);
+          break;
+        }
       }
     }
 
-    _histMultiCharged->fill(_histMultiCharged->binMean(0), numParticles*weight);
+
+    void init() {
+      _histPtTIn = bookHistogram1D(1, 1, 1);
+      _histPtTOut = bookHistogram1D(2, 1, 1);
+      _histPtSIn = bookHistogram1D(3, 1, 1);
+      _histPtSOut = bookHistogram1D(4, 1, 1);
+      
+      _histRapidityT = bookHistogram1D(5, 1, 1);
+      _histRapidityS = bookHistogram1D(6, 1, 1);
+      _histScaledMom = bookHistogram1D(7, 1, 1);
+      _histLogScaledMom = bookHistogram1D(8, 1, 1);
+      
+      _histPtTOutVsXp = bookProfile1D(9,  1, 1);
+      _histPtVsXp = bookProfile1D(10, 1, 1);    
+      
+      _hist1MinusT = bookHistogram1D(11, 1, 1);
+      _histTMajor = bookHistogram1D(12, 1, 1);
+      _histTMinor = bookHistogram1D(13, 1, 1);
+      _histOblateness = bookHistogram1D(14, 1, 1);
+      
+      _histSphericity = bookHistogram1D(15, 1, 1);
+      _histAplanarity = bookHistogram1D(16, 1, 1);
+      _histPlanarity = bookHistogram1D(17, 1, 1);
+      
+      _histCParam = bookHistogram1D(18, 1, 1);
+      _histDParam = bookHistogram1D(19, 1, 1);
+      
+      _histHemiMassH = bookHistogram1D(20, 1, 1);
+      _histHemiMassL = bookHistogram1D(21, 1, 1);
+      _histHemiMassD = bookHistogram1D(22, 1, 1);
+      
+      _histHemiBroadW = bookHistogram1D(23, 1, 1);
+      _histHemiBroadN = bookHistogram1D(24, 1, 1);
+      _histHemiBroadT = bookHistogram1D(25, 1, 1);
+      _histHemiBroadD = bookHistogram1D(26, 1, 1);
+
+      // Binned in y_cut
+      _histDiffRate2Durham = bookHistogram1D(27, 1, 1);
+      _histDiffRate2Jade = bookHistogram1D(28, 1, 1);
+      _histDiffRate3Durham = bookHistogram1D(29, 1, 1);
+      _histDiffRate3Jade = bookHistogram1D(30, 1, 1);
+      _histDiffRate4Durham = bookHistogram1D(31, 1, 1);
+      _histDiffRate4Jade = bookHistogram1D(32, 1, 1);
+
+      // Binned in cos(chi)
+      _histEEC = bookHistogram1D(33, 1, 1);
+      _histAEEC = bookHistogram1D(34, 1, 1);
+
+      _histMultiCharged = bookHistogram1D(35, 1, 1);
+
+      _histMultiPiPlus = bookHistogram1D(36, 1, 1);
+      _histMultiPi0 = bookHistogram1D(36, 1, 2);
+      _histMultiKPlus = bookHistogram1D(36, 1, 3);
+      _histMultiK0 = bookHistogram1D(36, 1, 4);
+      _histMultiEta = bookHistogram1D(36, 1, 5);
+      _histMultiEtaPrime = bookHistogram1D(36, 1, 6);
+      _histMultiDPlus = bookHistogram1D(36, 1, 7);
+      _histMultiD0 = bookHistogram1D(36, 1, 8);
+      _histMultiBPlus0 = bookHistogram1D(36, 1, 9);
+
+      _histMultiF0 = bookHistogram1D(37, 1, 1);
+
+      _histMultiRho = bookHistogram1D(38, 1, 1);
+      _histMultiKStar892Plus = bookHistogram1D(38, 1, 2);
+      _histMultiKStar892_0 = bookHistogram1D(38, 1, 3);
+      _histMultiPhi = bookHistogram1D(38, 1, 4);
+      _histMultiDStar2010Plus = bookHistogram1D(38, 1, 5);
+
+      _histMultiF2 = bookHistogram1D(39, 1, 1);
+      _histMultiK2Star1430_0 = bookHistogram1D(39, 1, 2);
+
+      _histMultiP = bookHistogram1D(40, 1, 1);
+      _histMultiLambda0 = bookHistogram1D(40, 1, 2);
+      _histMultiXiMinus = bookHistogram1D(40, 1, 3);
+      _histMultiOmegaMinus = bookHistogram1D(40, 1, 4);
+      _histMultiDeltaPlusPlus = bookHistogram1D(40, 1, 5);
+      _histMultiSigma1385Plus = bookHistogram1D(40, 1, 6);
+      _histMultiXi1530_0 = bookHistogram1D(40, 1, 7);
+      _histMultiLambdaB0 = bookHistogram1D(40, 1, 8);
+    }
 
 
-    // Final state of unstable particles to get particle spectra
-    const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
 
-    foreach (const Particle& p, ufs.particles()) {
-      int id = abs(p.pdgId());
-      switch (id) {
-         case 211:
-            _histMultiPiPlus->fill(_histMultiPiPlus->binMean(0), weight);
-            break;
-         case 111:
-            _histMultiPi0->fill(_histMultiPi0->binMean(0), weight);
-            break;
-         case 321:
-            _histMultiKPlus->fill(_histMultiKPlus->binMean(0), weight);
-            break;
-         case 130:
-         case 310:
-            _histMultiK0->fill(_histMultiK0->binMean(0), weight);
-            break;
-         case 221:
-            _histMultiEta->fill(_histMultiEta->binMean(0), weight);
-            break;
-         case 331:
-            _histMultiEtaPrime->fill(_histMultiEtaPrime->binMean(0), weight);
-            break;
-         case 411:
-            _histMultiDPlus->fill(_histMultiDPlus->binMean(0), weight);
-            break;
-         case 421:
-            _histMultiD0->fill(_histMultiD0->binMean(0), weight);
-            break;
-         case 511:
-         case 521:
-         case 531:
-            _histMultiBPlus0->fill(_histMultiBPlus0->binMean(0), weight);
-            break;
-         case 9010221:
-            _histMultiF0->fill(_histMultiF0->binMean(0), weight);
-            break;
-         case 113:
-            _histMultiRho->fill(_histMultiRho->binMean(0), weight);
-            break;
-         case 323:
-            _histMultiKStar892Plus->fill(_histMultiKStar892Plus->binMean(0), weight);
-            break;
-         case 313:
-            _histMultiKStar892_0->fill(_histMultiKStar892_0->binMean(0), weight);
-            break;
-         case 333:
-            _histMultiPhi->fill(_histMultiPhi->binMean(0), weight);
-            break;
-         case 413:
-            _histMultiDStar2010Plus->fill(_histMultiDStar2010Plus->binMean(0), weight);
-            break;
-         case 225:
-            _histMultiF2->fill(_histMultiF2->binMean(0), weight);
-            break;
-         case 315:
-            _histMultiK2Star1430_0->fill(_histMultiK2Star1430_0->binMean(0), weight);
-            break;
-         case 2212:
-            _histMultiP->fill(_histMultiP->binMean(0), weight);
-            break;
-         case 3122:
-            _histMultiLambda0->fill(_histMultiLambda0->binMean(0), weight);
-            break;
-         case 3312:
-            _histMultiXiMinus->fill(_histMultiXiMinus->binMean(0), weight);
-            break;
-         case 3334:
-            _histMultiOmegaMinus->fill(_histMultiOmegaMinus->binMean(0), weight);
-            break;
-         case 2224:
-            _histMultiDeltaPlusPlus->fill(_histMultiDeltaPlusPlus->binMean(0), weight);
-            break;
-         case 3114:
-            _histMultiSigma1385Plus->fill(_histMultiSigma1385Plus->binMean(0), weight);
-            break;
-         case 3324:
-            _histMultiXi1530_0->fill(_histMultiXi1530_0->binMean(0), weight);
-            break;
-         case 5122:
-            _histMultiLambdaB0->fill(_histMultiLambdaB0->binMean(0), weight);
-            break;
-      }
+    // Finalize
+    void finalize() { 
+      // Normalize inclusive single particle distributions to the average number 
+      // of charged particles per event.
+      const double avgNumParts = _weightedTotalPartNum / _passedCutWeightSum;
+
+      normalize(_histPtTIn, avgNumParts);
+      normalize(_histPtTOut, avgNumParts); 
+      normalize(_histPtSIn, avgNumParts);
+      normalize(_histPtSOut, avgNumParts); 
+
+      normalize(_histRapidityT, avgNumParts); 
+      normalize(_histRapidityS, avgNumParts); 
+
+      normalize(_histLogScaledMom, avgNumParts);
+      normalize(_histScaledMom, avgNumParts); 
+
+      scale(_histEEC, 1.0/_passedCutWeightSum);
+      scale(_histAEEC, 1.0/_passedCutWeightSum);
+      scale(_histMultiCharged, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiPiPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiPi0, 1.0/_passedCutWeightSum);
+      scale(_histMultiKPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiK0, 1.0/_passedCutWeightSum);
+      scale(_histMultiEta, 1.0/_passedCutWeightSum);
+      scale(_histMultiEtaPrime, 1.0/_passedCutWeightSum);
+      scale(_histMultiDPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiD0, 1.0/_passedCutWeightSum);
+      scale(_histMultiBPlus0, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiF0, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiRho, 1.0/_passedCutWeightSum);
+      scale(_histMultiKStar892Plus, 1.0/_passedCutWeightSum);
+      scale(_histMultiKStar892_0, 1.0/_passedCutWeightSum);
+      scale(_histMultiPhi, 1.0/_passedCutWeightSum);
+      scale(_histMultiDStar2010Plus, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiF2, 1.0/_passedCutWeightSum);
+      scale(_histMultiK2Star1430_0, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiP, 1.0/_passedCutWeightSum);
+      scale(_histMultiLambda0, 1.0/_passedCutWeightSum);
+      scale(_histMultiXiMinus, 1.0/_passedCutWeightSum);
+      scale(_histMultiOmegaMinus, 1.0/_passedCutWeightSum);
+      scale(_histMultiDeltaPlusPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiSigma1385Plus, 1.0/_passedCutWeightSum);
+      scale(_histMultiXi1530_0, 1.0/_passedCutWeightSum);
+      scale(_histMultiLambdaB0, 1.0/_passedCutWeightSum);
+
+      normalize(_hist1MinusT); 
+      normalize(_histTMajor); 
+      normalize(_histTMinor); 
+      normalize(_histOblateness); 
+
+      normalize(_histSphericity); 
+      normalize(_histAplanarity); 
+      normalize(_histPlanarity); 
+
+      normalize(_histHemiMassD); 
+      normalize(_histHemiMassH); 
+      normalize(_histHemiMassL); 
+
+      normalize(_histHemiBroadW); 
+      normalize(_histHemiBroadN); 
+      normalize(_histHemiBroadT); 
+      normalize(_histHemiBroadD); 
+
+      normalize(_histCParam); 
+      normalize(_histDParam); 
+
+      normalize(_histDiffRate2Durham); 
+      normalize(_histDiffRate2Jade); 
+      normalize(_histDiffRate3Durham);
+      normalize(_histDiffRate3Jade); 
+      normalize(_histDiffRate4Durham);
+      normalize(_histDiffRate4Jade); 
     }
 
-  }
+    //@}
 
 
-  void DELPHI_1996_S3430090::init() {
-    _histPtTIn = bookHistogram1D(1, 1, 1);
-    _histPtTOut = bookHistogram1D(2, 1, 1);
-    _histPtSIn = bookHistogram1D(3, 1, 1);
-    _histPtSOut = bookHistogram1D(4, 1, 1);
-    
-    _histRapidityT = bookHistogram1D(5, 1, 1);
-    _histRapidityS = bookHistogram1D(6, 1, 1);
-    _histScaledMom = bookHistogram1D(7, 1, 1);
-    _histLogScaledMom = bookHistogram1D(8, 1, 1);
-    
-    _histPtTOutVsXp = bookProfile1D(9,  1, 1);
-    _histPtVsXp = bookProfile1D(10, 1, 1);    
-
-    _hist1MinusT = bookHistogram1D(11, 1, 1);
-    _histTMajor = bookHistogram1D(12, 1, 1);
-    _histTMinor = bookHistogram1D(13, 1, 1);
-    _histOblateness = bookHistogram1D(14, 1, 1);
-    
-    _histSphericity = bookHistogram1D(15, 1, 1);
-    _histAplanarity = bookHistogram1D(16, 1, 1);
-    _histPlanarity = bookHistogram1D(17, 1, 1);
-    
-    _histCParam = bookHistogram1D(18, 1, 1);
-    _histDParam = bookHistogram1D(19, 1, 1);
-    
-    _histHemiMassH = bookHistogram1D(20, 1, 1);
-    _histHemiMassL = bookHistogram1D(21, 1, 1);
-    _histHemiMassD = bookHistogram1D(22, 1, 1);
-
-    _histHemiBroadW = bookHistogram1D(23, 1, 1);
-    _histHemiBroadN = bookHistogram1D(24, 1, 1);
-    _histHemiBroadT = bookHistogram1D(25, 1, 1);
-    _histHemiBroadD = bookHistogram1D(26, 1, 1);
-
-    // Binned in y_cut
-    _histDiffRate2Durham = bookHistogram1D(27, 1, 1);
-    _histDiffRate2Jade = bookHistogram1D(28, 1, 1);
-    _histDiffRate3Durham = bookHistogram1D(29, 1, 1);
-    _histDiffRate3Jade = bookHistogram1D(30, 1, 1);
-    _histDiffRate4Durham = bookHistogram1D(31, 1, 1);
-    _histDiffRate4Jade = bookHistogram1D(32, 1, 1);
-
-    // Binned in cos(chi)
-    _histEEC = bookHistogram1D(33, 1, 1);
-    _histAEEC = bookHistogram1D(34, 1, 1);
+  private:
 
-    _histMultiCharged = bookHistogram1D(35, 1, 1);
-    
-    _histMultiPiPlus = bookHistogram1D(36, 1, 1);
-    _histMultiPi0 = bookHistogram1D(36, 1, 2);
-    _histMultiKPlus = bookHistogram1D(36, 1, 3);
-    _histMultiK0 = bookHistogram1D(36, 1, 4);
-    _histMultiEta = bookHistogram1D(36, 1, 5);
-    _histMultiEtaPrime = bookHistogram1D(36, 1, 6);
-    _histMultiDPlus = bookHistogram1D(36, 1, 7);
-    _histMultiD0 = bookHistogram1D(36, 1, 8);
-    _histMultiBPlus0 = bookHistogram1D(36, 1, 9);
-    
-    _histMultiF0 = bookHistogram1D(37, 1, 1);
-    
-    _histMultiRho = bookHistogram1D(38, 1, 1);
-    _histMultiKStar892Plus = bookHistogram1D(38, 1, 2);
-    _histMultiKStar892_0 = bookHistogram1D(38, 1, 3);
-    _histMultiPhi = bookHistogram1D(38, 1, 4);
-    _histMultiDStar2010Plus = bookHistogram1D(38, 1, 5);
-    
-    _histMultiF2 = bookHistogram1D(39, 1, 1);
-    _histMultiK2Star1430_0 = bookHistogram1D(39, 1, 2);
-    
-    _histMultiP = bookHistogram1D(40, 1, 1);
-    _histMultiLambda0 = bookHistogram1D(40, 1, 2);
-    _histMultiXiMinus = bookHistogram1D(40, 1, 3);
-    _histMultiOmegaMinus = bookHistogram1D(40, 1, 4);
-    _histMultiDeltaPlusPlus = bookHistogram1D(40, 1, 5);
-    _histMultiSigma1385Plus = bookHistogram1D(40, 1, 6);
-    _histMultiXi1530_0 = bookHistogram1D(40, 1, 7);
-    _histMultiLambdaB0 = bookHistogram1D(40, 1, 8);
-  }
-  
-  
-  
-  // Finalize
-  void DELPHI_1996_S3430090::finalize() { 
-    // Normalize inclusive single particle distributions to the average number 
-    // of charged particles per event.
-    const double avgNumParts = _weightedTotalPartNum / _passedCutWeightSum;
-
-    normalize(_histPtTIn, avgNumParts);
-    normalize(_histPtTOut, avgNumParts); 
-    normalize(_histPtSIn, avgNumParts);
-    normalize(_histPtSOut, avgNumParts); 
-
-    normalize(_histRapidityT, avgNumParts); 
-    normalize(_histRapidityS, avgNumParts); 
-
-    normalize(_histLogScaledMom, avgNumParts);
-    normalize(_histScaledMom, avgNumParts); 
-
-    scale(_histEEC, 1.0/_passedCutWeightSum);
-    scale(_histAEEC, 1.0/_passedCutWeightSum);
-    scale(_histMultiCharged, 1.0/_passedCutWeightSum);
-
-    scale(_histMultiPiPlus, 1.0/_passedCutWeightSum);
-    scale(_histMultiPi0, 1.0/_passedCutWeightSum);
-    scale(_histMultiKPlus, 1.0/_passedCutWeightSum);
-    scale(_histMultiK0, 1.0/_passedCutWeightSum);
-    scale(_histMultiEta, 1.0/_passedCutWeightSum);
-    scale(_histMultiEtaPrime, 1.0/_passedCutWeightSum);
-    scale(_histMultiDPlus, 1.0/_passedCutWeightSum);
-    scale(_histMultiD0, 1.0/_passedCutWeightSum);
-    scale(_histMultiBPlus0, 1.0/_passedCutWeightSum);
-
-    scale(_histMultiF0, 1.0/_passedCutWeightSum);
-
-    scale(_histMultiRho, 1.0/_passedCutWeightSum);
-    scale(_histMultiKStar892Plus, 1.0/_passedCutWeightSum);
-    scale(_histMultiKStar892_0, 1.0/_passedCutWeightSum);
-    scale(_histMultiPhi, 1.0/_passedCutWeightSum);
-    scale(_histMultiDStar2010Plus, 1.0/_passedCutWeightSum);
-
-    scale(_histMultiF2, 1.0/_passedCutWeightSum);
-    scale(_histMultiK2Star1430_0, 1.0/_passedCutWeightSum);
-
-    scale(_histMultiP, 1.0/_passedCutWeightSum);
-    scale(_histMultiLambda0, 1.0/_passedCutWeightSum);
-    scale(_histMultiXiMinus, 1.0/_passedCutWeightSum);
-    scale(_histMultiOmegaMinus, 1.0/_passedCutWeightSum);
-    scale(_histMultiDeltaPlusPlus, 1.0/_passedCutWeightSum);
-    scale(_histMultiSigma1385Plus, 1.0/_passedCutWeightSum);
-    scale(_histMultiXi1530_0, 1.0/_passedCutWeightSum);
-    scale(_histMultiLambdaB0, 1.0/_passedCutWeightSum);
-
-    normalize(_hist1MinusT); 
-    normalize(_histTMajor); 
-    normalize(_histTMinor); 
-    normalize(_histOblateness); 
-
-    normalize(_histSphericity); 
-    normalize(_histAplanarity); 
-    normalize(_histPlanarity); 
-
-    normalize(_histHemiMassD); 
-    normalize(_histHemiMassH); 
-    normalize(_histHemiMassL); 
-
-    normalize(_histHemiBroadW); 
-    normalize(_histHemiBroadN); 
-    normalize(_histHemiBroadT); 
-    normalize(_histHemiBroadD); 
-
-    normalize(_histCParam); 
-    normalize(_histDParam); 
-
-    normalize(_histDiffRate2Durham); 
-    normalize(_histDiffRate2Jade); 
-    normalize(_histDiffRate3Durham);
-    normalize(_histDiffRate3Jade); 
-    normalize(_histDiffRate4Durham);
-    normalize(_histDiffRate4Jade); 
-  }
+    /// Weighted sum of the charged-particle multiplicity, used to
+    /// calculate the average multiplicity for normalising the
+    /// inclusive single-particle distributions.
+    double _weightedTotalPartNum;
+
+    /// Sum of the event weights passing the multiplicity cut.
+    double _passedCutWeightSum;
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histPtTIn;
+    AIDA::IHistogram1D *_histPtTOut;
+    AIDA::IHistogram1D *_histPtSIn;
+    AIDA::IHistogram1D *_histPtSOut;
+
+    AIDA::IHistogram1D *_histRapidityT;
+    AIDA::IHistogram1D *_histRapidityS;
+
+    AIDA::IHistogram1D *_histScaledMom, *_histLogScaledMom;
+
+    AIDA::IProfile1D   *_histPtTOutVsXp, *_histPtVsXp;
+
+    AIDA::IHistogram1D *_hist1MinusT; 
+    AIDA::IHistogram1D *_histTMajor; 
+    AIDA::IHistogram1D *_histTMinor; 
+    AIDA::IHistogram1D *_histOblateness; 
+
+    AIDA::IHistogram1D *_histSphericity;
+    AIDA::IHistogram1D *_histAplanarity;
+    AIDA::IHistogram1D *_histPlanarity;
+
+    AIDA::IHistogram1D *_histCParam;
+    AIDA::IHistogram1D *_histDParam;
+
+    AIDA::IHistogram1D *_histHemiMassD;
+    AIDA::IHistogram1D *_histHemiMassH;
+    AIDA::IHistogram1D *_histHemiMassL;
+               
+    AIDA::IHistogram1D *_histHemiBroadW;
+    AIDA::IHistogram1D *_histHemiBroadN;
+    AIDA::IHistogram1D *_histHemiBroadT;
+    AIDA::IHistogram1D *_histHemiBroadD;
+
+    AIDA::IHistogram1D *_histDiffRate2Durham;
+    AIDA::IHistogram1D *_histDiffRate2Jade; 
+    AIDA::IHistogram1D *_histDiffRate3Durham;
+    AIDA::IHistogram1D *_histDiffRate3Jade;
+    AIDA::IHistogram1D *_histDiffRate4Durham;
+    AIDA::IHistogram1D *_histDiffRate4Jade;
+
+    AIDA::IHistogram1D *_histEEC, *_histAEEC;
+
+    AIDA::IHistogram1D *_histMultiCharged;
+
+    AIDA::IHistogram1D *_histMultiPiPlus;
+    AIDA::IHistogram1D *_histMultiPi0;
+    AIDA::IHistogram1D *_histMultiKPlus;
+    AIDA::IHistogram1D *_histMultiK0;
+    AIDA::IHistogram1D *_histMultiEta;
+    AIDA::IHistogram1D *_histMultiEtaPrime;
+    AIDA::IHistogram1D *_histMultiDPlus;
+    AIDA::IHistogram1D *_histMultiD0;
+    AIDA::IHistogram1D *_histMultiBPlus0;
+
+    AIDA::IHistogram1D *_histMultiF0;
+
+    AIDA::IHistogram1D *_histMultiRho;
+    AIDA::IHistogram1D *_histMultiKStar892Plus;
+    AIDA::IHistogram1D *_histMultiKStar892_0;
+    AIDA::IHistogram1D *_histMultiPhi;
+    AIDA::IHistogram1D *_histMultiDStar2010Plus;
+
+    AIDA::IHistogram1D *_histMultiF2;
+    AIDA::IHistogram1D *_histMultiK2Star1430_0;
+
+    AIDA::IHistogram1D *_histMultiP;
+    AIDA::IHistogram1D *_histMultiLambda0;
+    AIDA::IHistogram1D *_histMultiXiMinus;
+    AIDA::IHistogram1D *_histMultiOmegaMinus;
+    AIDA::IHistogram1D *_histMultiDeltaPlusPlus;
+    AIDA::IHistogram1D *_histMultiSigma1385Plus;
+    AIDA::IHistogram1D *_histMultiXi1530_0;
+    AIDA::IHistogram1D *_histMultiLambdaB0;
+    //@}
 
+  };
 
 
 

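For reference, the symmetric/antisymmetric double fill of _histEEC and
_histAEEC in the pair loop above implements the energy-energy correlation
and its asymmetry: with Evis the summed charged-particle energy, each
pair (i,j) adds E_i*E_j/Evis^2 to the EEC at cos(chi_ij), while the AEEC
receives the same entry at +cos(chi_ij) and its negative at -cos(chi_ij),
so that after filling

  AEEC(cos chi) = EEC(cos chi) - EEC(-cos chi)
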
Modified: trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,108 +1,130 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/DELPHI_2002_069_CONF_603.hh"
 #include "Rivet/Tools/ParticleIDMethods.hh"
 #include "Rivet/Projections/Beam.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Projections/InitialQuarks.hh"
 
-/// @todo Use inline function rather than preprocessor, and use ParticleName enum values.
-/// @todo Put these in a PID utility header? (isParton(id) would be a good candidate for inclusion in HepMC)
+
+/// @todo Use inline PID functions instead (a sketch follows this file's diff)
 #define IS_PARTON_PDGID(id) ( abs(id) <= 100 && abs(id) != 22 && (abs(id) < 11 || abs(id) > 18) )
 #define IS_BHADRON_PDGID(id) ( ((abs(id)/100)%10 == 5) || (abs(id) >= 5000 && abs(id) <= 5999) )
 
 namespace Rivet {
 
 
-  // Constructor
-  DELPHI_2002_069_CONF_603::DELPHI_2002_069_CONF_603() 
-    : Analysis("DELPHI_2002_069_CONF_603")
-  {
-    setBeams(ELECTRON, POSITRON); 
-    addProjection(Beam(), "Beams");
-    addProjection(ChargedFinalState(), "FS");
-    addProjection(InitialQuarks(), "IQF");
-  }
-
-
-  void DELPHI_2002_069_CONF_603::analyze(const Event& e) {
-    // First, veto on leptonic events by requiring at least 4 charged FS particles
-    const FinalState& fs = applyProjection<FinalState>(e, "FS");
-    const size_t numParticles = fs.particles().size();
-
-    // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
-    if (numParticles < 2) {
-      getLog() << Log::DEBUG << "Failed ncharged cut" << endl;
-      vetoEvent;
+  /// @brief DELPHI b-fragmentation measurement
+  /// @author Hendrik Hoeth
+  class DELPHI_2002_069_CONF_603 : public Analysis {
+  public:
+
+    /// Constructor
+    DELPHI_2002_069_CONF_603() 
+      : Analysis("DELPHI_2002_069_CONF_603")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      addProjection(ChargedFinalState(), "FS");
+      addProjection(InitialQuarks(), "IQF");
     }
-    getLog() << Log::DEBUG << "Passed ncharged cut" << endl;
 
-    // Get event weight for histo filling
-    const double weight = e.weight();
 
-    // Get beams and average beam momentum
-    const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-    const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
-                                 beams.second.momentum().vector3().mod() ) / 2.0;
-    getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-
-
-    for (GenEvent::particle_const_iterator p = e.genEvent().particles_begin();
-         p != e.genEvent().particles_end(); ++p) {
-      const GenVertex* pv = (*p)->production_vertex();
-      const GenVertex* dv = (*p)->end_vertex();
-      if (IS_BHADRON_PDGID((*p)->pdg_id())) {
-        const double xp = (*p)->momentum().e()/meanBeamMom;
-
-        // If the B-hadron has a parton as parent, call it primary B-hadron:
-        if (pv!=NULL) {
-          bool is_primary = false;
-          for (GenVertex::particles_in_const_iterator pp = pv->particles_in_const_begin() ;
-              pp != pv->particles_in_const_end() ; ++pp) {
-            if (IS_PARTON_PDGID((*pp)->pdg_id()))
-              is_primary = true;
-          }
-          if (is_primary) {
-            _histXbprim->fill(xp, weight);
-            _histMeanXbprim->fill(_histMeanXbprim->binMean(0), xp, weight);
-          }
-        }
+    /// @name Analysis methods
+    //@{
 
-        // If the B-hadron has no B-hadron as a child, it decayed weakly:
-        if (dv!=NULL) {
-          bool is_weak = true;
-          for (GenVertex::particles_out_const_iterator pp = dv->particles_out_const_begin() ;
-              pp != dv->particles_out_const_end() ; ++pp) {
-            if (IS_BHADRON_PDGID((*pp)->pdg_id())) {
-              is_weak = false;
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by cutting on the charged FS multiplicity
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed ncharged cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed ncharged cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      
+      foreach (const GenParticle* p, particles(e.genEvent())) {
+        const GenVertex* pv = p->production_vertex();
+        const GenVertex* dv = p->end_vertex();
+        if (IS_BHADRON_PDGID(p->pdg_id())) {
+          const double xp = p->momentum().e()/meanBeamMom;
+          
+          // If the B-hadron has a parton as parent, call it primary B-hadron:
+          if (pv) {
+            bool is_primary = false;
+            for (GenVertex::particles_in_const_iterator pp = pv->particles_in_const_begin(); pp != pv->particles_in_const_end() ; ++pp) {
+              if (IS_PARTON_PDGID((*pp)->pdg_id())) is_primary = true;
+            }
+            if (is_primary) {
+              _histXbprim->fill(xp, weight);
+              _histMeanXbprim->fill(_histMeanXbprim->binMean(0), xp, weight);
             }
           }
-          if (is_weak) {
-            _histXbweak->fill(xp, weight);
-            _histMeanXbweak->fill(_histMeanXbweak->binMean(0), xp, weight);
+          
+          // If the B-hadron has no B-hadron as a child, it decayed weakly:
+          if (dv) {
+            bool is_weak = true;
+            for (GenVertex::particles_out_const_iterator pp = dv->particles_out_const_begin() ;
+                 pp != dv->particles_out_const_end() ; ++pp) {
+              if (IS_BHADRON_PDGID((*pp)->pdg_id())) {
+                is_weak = false;
+              }
+            }
+            if (is_weak) {
+              _histXbweak->fill(xp, weight);
+              _histMeanXbweak->fill(_histMeanXbweak->binMean(0), xp, weight);
+            }
           }
+          
         }
-
       }
     }
-  }
+      
+    
+    /// Book histograms      
+    void init() {
+      _histXbprim     = bookHistogram1D(1, 1, 1);
+      _histXbweak     = bookHistogram1D(2, 1, 1);
+      _histMeanXbprim = bookProfile1D(4, 1, 1);
+      _histMeanXbweak = bookProfile1D(5, 1, 1);
+    }
+    
+
+    // Finalize
+    void finalize() {
+      normalize(_histXbprim);
+      normalize(_histXbweak);
+    }
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+
+    AIDA::IHistogram1D *_histXbprim;
+    AIDA::IHistogram1D *_histXbweak;
 
+    AIDA::IProfile1D *_histMeanXbprim;
+    AIDA::IProfile1D *_histMeanXbweak;
 
+    //@}
 
-  void DELPHI_2002_069_CONF_603::init() {
-    _histXbprim     = bookHistogram1D(1, 1, 1);
-    _histXbweak     = bookHistogram1D(2, 1, 1);
-    _histMeanXbprim = bookProfile1D(4, 1, 1);
-    _histMeanXbweak = bookProfile1D(5, 1, 1);
-  }
-
-  // Finalize
-  void DELPHI_2002_069_CONF_603::finalize() {
-    normalize(_histXbprim);
-    normalize(_histXbweak);
-  }
+  };
 
 
 

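As the @todo at the top of this file notes, the two preprocessor tests
could be replaced by type-safe inline functions with identical logic.
A sketch, with hypothetical names that are not (yet) part of Rivet's
PID utilities:

  #include <cstdlib>  // std::abs()

  // Same test as the IS_PARTON_PDGID macro, as an inline function
  inline bool isPartonPdgId(int id) {
    const int aid = std::abs(id);
    return aid <= 100 && aid != 22 && (aid < 11 || aid > 18);
  }

  // Same test as the IS_BHADRON_PDGID macro, as an inline function
  inline bool isBHadronPdgId(int id) {
    const int aid = std::abs(id);
    return ((aid / 100) % 10 == 5) || (aid >= 5000 && aid <= 5999);
  }
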
Modified: trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,7 +1,6 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/DELPHI_2003_WUD_03_11.hh"
 #include "Rivet/Tools/ParticleIDMethods.hh"
 #include "Rivet/Projections/FastJets.hh"
 #include "Rivet/Projections/FinalState.hh"
@@ -10,128 +9,192 @@
 namespace Rivet {
 
 
-  // Constructor
-  DELPHI_2003_WUD_03_11::DELPHI_2003_WUD_03_11() 
-    : Analysis("DELPHI_2003_WUD_03_11")
-  {
-    const ChargedFinalState cfs;
-    addProjection(cfs, "FS");
-    addProjection(FastJets(cfs, FastJets::JADE, 0.7), "JadeJets");
-    addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
-    _numdurjets = 0;
-    _numjadejets = 0;
-  }
-
-
-  double DELPHI_2003_WUD_03_11::calc_BZ(std::vector<fastjet::PseudoJet> jets) {
-    assert(jets.size()==4);
-    Vector3 p12 = cross( momentum3(jets[0]), momentum3(jets[1]));
-    Vector3 p34 = cross( momentum3(jets[2]), momentum3(jets[3]));
-    return dot(p12,p34) / (p12.mod()*p34.mod());
-  }
-
-  double DELPHI_2003_WUD_03_11::calc_KSW(std::vector<fastjet::PseudoJet> jets) {
-    assert(jets.size()==4);
-    Vector3 p13 = cross( momentum3(jets[0]), momentum3(jets[2]));
-    Vector3 p24 = cross( momentum3(jets[1]), momentum3(jets[3]));
-    Vector3 p14 = cross( momentum3(jets[0]), momentum3(jets[3]));
-    Vector3 p23 = cross( momentum3(jets[1]), momentum3(jets[2]));
-    return cos (0.5*( acos (dot(p14,p23) / (p14.mod()*p23.mod())) +
-                      acos (dot(p13,p24) / (p13.mod()*p24.mod())) ));
-  }
-
-  double DELPHI_2003_WUD_03_11::calc_NR(std::vector<fastjet::PseudoJet> jets) {
-    assert(jets.size()==4);
-    Vector3 p12 = momentum3(jets[0]) - momentum3(jets[1]);
-    Vector3 p34 = momentum3(jets[2]) - momentum3(jets[3]);
-    return dot(p12,p34) / (p12.mod()*p34.mod());
-  }
-
-  double DELPHI_2003_WUD_03_11::calc_ALPHA34(std::vector<fastjet::PseudoJet> jets) {
-    assert(jets.size()==4);
-    Vector3 p3 = momentum3(jets[2]);
-    Vector3 p4 = momentum3(jets[3]);
-    return dot(p3,p4) / (p3.mod()*p4.mod());
-  }
-
-  void DELPHI_2003_WUD_03_11::analyze(const Event& e) {
-    // First, veto on leptonic events by requiring at least 4 charged FS particles
-    const FinalState& fs = applyProjection<FinalState>(e, "FS");
-    const size_t numParticles = fs.particles().size();
-
-    // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
-    if (numParticles < 2) {
-      getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
-      vetoEvent;
-    }
-    getLog() << Log::DEBUG << "Passed multiplicity cut" << endl;
-
-    // Get event weight for histo filling
-    const double weight = e.weight();
-
-    // Jets
-    const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
-    std::vector<fastjet::PseudoJet> jets_durham;
-    if (durjet.clusterSeq()) {
-      jets_durham = fastjet::sorted_by_E(durjet.clusterSeq()->exclusive_jets_ycut(0.008));
-      if (jets_durham.size()==4) {
-        _histDurhamBZ->fill(fabs(calc_BZ(jets_durham)), weight);
-        _histDurhamKSW->fill(calc_KSW(jets_durham), weight);
-        _histDurhamNR->fill(fabs(calc_NR(jets_durham)), weight);
-        _histDurhamALPHA34->fill(calc_ALPHA34(jets_durham), weight);
-      }
-      if (durjet.clusterSeq()->exclusive_ymerge(3) > 0.008 && durjet.clusterSeq()->exclusive_ymerge(4) < 0.008)
-        _numdurjets++;
+  /**
+   * @brief DELPHI 4-jet angular distributions
+   * @author Hendrik Hoeth
+   *
+   * This analysis is based on Hendrik Hoeth's diploma thesis, measuring the
+   * 4-jet angular distributions at LEP-1.
+   *
+   * @par Run conditions
+   *
+   * @arg LEP1 beam energy: \f$ \sqrt{s} = \f$ 91.2 GeV
+   * @arg Run with generic QCD events.
+   * @arg No \f$ p_\perp^\text{min} \f$ cutoff is required
+   */
+  class DELPHI_2003_WUD_03_11 : public Analysis {
+  public:
+
+    /// Constructor
+    DELPHI_2003_WUD_03_11()
+      : Analysis("DELPHI_2003_WUD_03_11")
+    {
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+      addProjection(FastJets(cfs, FastJets::JADE, 0.7), "JadeJets");
+      addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
+      _numdurjets = 0;
+      _numjadejets = 0;
     }
+    
 
-    const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
-    std::vector<fastjet::PseudoJet> jets_jade;
-    if (jadejet.clusterSeq()) {
-      jets_jade = fastjet::sorted_by_E(jadejet.clusterSeq()->exclusive_jets_ycut(0.015));
-      if (jets_jade.size()==4) {
-        _histJadeBZ->fill(fabs(calc_BZ(jets_jade)), weight);
-        _histJadeKSW->fill(calc_KSW(jets_jade), weight);
-        _histJadeNR->fill(fabs(calc_NR(jets_jade)), weight);
-        _histJadeALPHA34->fill(calc_ALPHA34(jets_jade), weight);
-      }
-      if (jadejet.clusterSeq()->exclusive_ymerge(3) > 0.015 && jadejet.clusterSeq()->exclusive_ymerge(4) < 0.015)
-        _numjadejets++;
+
+    /// @name Jet angle calculator functions
+    /// @todo These shouldn't be object methods, as they have no state!
+    //@{
+    
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils?
+    double calc_BZ(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p12 = cross( momentum3(jets[0]), momentum3(jets[1]));
+      Vector3 p34 = cross( momentum3(jets[2]), momentum3(jets[3]));
+      return dot(p12,p34) / (p12.mod()*p34.mod());
+    }
+
+
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils? 
+    double calc_KSW(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p13 = cross( momentum3(jets[0]), momentum3(jets[2]));
+      Vector3 p24 = cross( momentum3(jets[1]), momentum3(jets[3]));
+      Vector3 p14 = cross( momentum3(jets[0]), momentum3(jets[3]));
+      Vector3 p23 = cross( momentum3(jets[1]), momentum3(jets[2]));
+      return cos (0.5*( acos (dot(p14,p23) / (p14.mod()*p23.mod())) +
+                        acos (dot(p13,p24) / (p13.mod()*p24.mod())) ));
+    }
+    
+
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils? 
+    double calc_NR(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p12 = momentum3(jets[0]) - momentum3(jets[1]);
+      Vector3 p34 = momentum3(jets[2]) - momentum3(jets[3]);
+      return dot(p12,p34) / (p12.mod()*p34.mod());
+    }
+
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils? 
+    double calc_ALPHA34(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p3 = momentum3(jets[2]);
+      Vector3 p4 = momentum3(jets[3]);
+      return dot(p3,p4) / (p3.mod()*p4.mod());
     }
 
-  }
+    //@}
+
+
+
+    /// @name Analysis methods
+    //@{
+
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed multiplicity cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      
+      // Jets
+      const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
+      vector<fastjet::PseudoJet> jets_durham;
+      if (durjet.clusterSeq()) {
+        jets_durham = fastjet::sorted_by_E(durjet.clusterSeq()->exclusive_jets_ycut(0.008));
+        if (jets_durham.size() == 4) {
+          _histDurhamBZ->fill(fabs(calc_BZ(jets_durham)), weight);
+          _histDurhamKSW->fill(calc_KSW(jets_durham), weight);
+          _histDurhamNR->fill(fabs(calc_NR(jets_durham)), weight);
+          _histDurhamALPHA34->fill(calc_ALPHA34(jets_durham), weight);
+        }
+        if (durjet.clusterSeq()->exclusive_ymerge(3) > 0.008 && 
+            durjet.clusterSeq()->exclusive_ymerge(4) < 0.008) {
+          _numdurjets++;
+        }
+      }
+      
+      const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
+      vector<fastjet::PseudoJet> jets_jade;
+      if (jadejet.clusterSeq()) {
+        jets_jade = fastjet::sorted_by_E(jadejet.clusterSeq()->exclusive_jets_ycut(0.015));
+        if (jets_jade.size() == 4) {
+          _histJadeBZ->fill(fabs(calc_BZ(jets_jade)), weight);
+          _histJadeKSW->fill(calc_KSW(jets_jade), weight);
+          _histJadeNR->fill(fabs(calc_NR(jets_jade)), weight);
+          _histJadeALPHA34->fill(calc_ALPHA34(jets_jade), weight);
+        }
+        if (jadejet.clusterSeq()->exclusive_ymerge(3) > 0.015 && 
+            jadejet.clusterSeq()->exclusive_ymerge(4) < 0.015) {
+          _numjadejets++;
+        }
+      }
+      
+    }
+    
+    
+    
+    void init() {
+      _histDurhamBZ      = bookHistogram1D(1, 1, 1);
+      _histDurhamKSW     = bookHistogram1D(2, 1, 1);
+      _histDurhamNR      = bookHistogram1D(3, 1, 1);
+      _histDurhamALPHA34 = bookHistogram1D(4, 1, 1);
+      _histJadeBZ        = bookHistogram1D(1, 2, 1);
+      _histJadeKSW       = bookHistogram1D(2, 2, 1);
+      _histJadeNR        = bookHistogram1D(3, 2, 1);
+      _histJadeALPHA34   = bookHistogram1D(4, 2, 1);
+    }
+    
+    
+    
+    // Finalize
+    void finalize() { 
+      // Normalise the four-jet angle distributions to the measured
+      // four-jet fractions.
+      
+      getLog() << Log::INFO << "Number of Durham 4-jet events = " << _numdurjets << endl;
+      getLog() << Log::INFO << "Number of Jade 4-jet events   = " << _numjadejets << endl;
+      normalize(_histDurhamBZ      , 0.0785);
+      normalize(_histDurhamKSW     , 0.0785);
+      normalize(_histDurhamNR      , 0.0785);
+      normalize(_histDurhamALPHA34 , 0.0785);
+      normalize(_histJadeBZ        , 0.0277);
+      normalize(_histJadeKSW       , 0.0277);
+      normalize(_histJadeNR        , 0.0277);
+      normalize(_histJadeALPHA34   , 0.0277);
+    }
 
+    //@}
 
 
-  void DELPHI_2003_WUD_03_11::init() {
-    _histDurhamBZ      = bookHistogram1D(1, 1, 1);
-    _histDurhamKSW     = bookHistogram1D(2, 1, 1);
-    _histDurhamNR      = bookHistogram1D(3, 1, 1);
-    _histDurhamALPHA34 = bookHistogram1D(4, 1, 1);
-    _histJadeBZ        = bookHistogram1D(1, 2, 1);
-    _histJadeKSW       = bookHistogram1D(2, 2, 1);
-    _histJadeNR        = bookHistogram1D(3, 2, 1);
-    _histJadeALPHA34   = bookHistogram1D(4, 2, 1);
-  }
+  private:
 
+    int _numdurjets;
+    int _numjadejets;
 
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histDurhamBZ;
+    AIDA::IHistogram1D *_histDurhamKSW;
+    AIDA::IHistogram1D *_histDurhamNR;
+    AIDA::IHistogram1D *_histDurhamALPHA34;
+    AIDA::IHistogram1D *_histJadeBZ;
+    AIDA::IHistogram1D *_histJadeKSW;
+    AIDA::IHistogram1D *_histJadeNR;
+    AIDA::IHistogram1D *_histJadeALPHA34;
+    //@}
 
-  // Finalize
-  void DELPHI_2003_WUD_03_11::finalize() { 
-    // Normalize inclusive single particle distributions to the average number 
-    // of charged particles per event.
-
-    getLog() << Log::WARN << "numdurjets   = " << _numdurjets << endl;
-    getLog() << Log::WARN << "numjadejets  = " << _numjadejets << endl;
-    getLog() << Log::WARN << "sumofweights = " << sumOfWeights() << endl;
-    normalize(_histDurhamBZ      , 0.0785);
-    normalize(_histDurhamKSW     , 0.0785);
-    normalize(_histDurhamNR      , 0.0785);
-    normalize(_histDurhamALPHA34 , 0.0785);
-    normalize(_histJadeBZ        , 0.0277);
-    normalize(_histJadeKSW       , 0.0277);
-    normalize(_histJadeNR        , 0.0277);
-    normalize(_histJadeALPHA34   , 0.0277);
-  }
+  };
 
 
 

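For reference, the four calc_* helpers above compute the standard four-jet
angular observables, with the jets energy-ordered (hardest first) by
fastjet::sorted_by_E; this identification follows the usual LEP conventions
rather than anything spelled out in the code. Writing \vec p_i for the jet
three-momenta and \angle(a,b) for the angle between two vectors:

  \cos\chi_{\mathrm{BZ}} =
      \frac{(\vec p_1\times\vec p_2)\cdot(\vec p_3\times\vec p_4)}
           {|\vec p_1\times\vec p_2|\,|\vec p_3\times\vec p_4|}
      \quad\text{(Bengtsson--Zerwas angle)}

  \cos\Phi_{\mathrm{KSW}} =
      \cos\left[\tfrac{1}{2}\bigl(\angle(\vec p_1\times\vec p_4,\,\vec p_2\times\vec p_3)
              + \angle(\vec p_1\times\vec p_3,\,\vec p_2\times\vec p_4)\bigr)\right]
      \quad\text{(Koerner--Schierholz--Willrodt angle)}

  \cos\theta^{*}_{\mathrm{NR}} =
      \frac{(\vec p_1-\vec p_2)\cdot(\vec p_3-\vec p_4)}
           {|\vec p_1-\vec p_2|\,|\vec p_3-\vec p_4|}
      \quad\text{(modified Nachtmann--Reiter angle)}

  \cos\alpha_{34} = \frac{\vec p_3\cdot\vec p_4}{|\vec p_3|\,|\vec p_4|}
      \quad\text{(opening angle of the two softest jets)}

Note that analyze() histograms |cos chi_BZ| and |cos theta*_NR| (hence the
fabs calls), presumably matching the folded distributions as published.
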
Modified: trunk/src/Analyses/H1_1994_S2919893.cc
==============================================================================
--- trunk/src/Analyses/H1_1994_S2919893.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/H1_1994_S2919893.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,211 +1,254 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh" 
 #include "Rivet/RivetAIDA.hh"
 #include "Rivet/Math/Constants.hh"
-#include "Rivet/Analyses/H1_1994_S2919893.hh"
 #include "Rivet/Tools/ParticleIDMethods.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/DISKinematics.hh"
 
 namespace Rivet {
 
-
-  // Constructor
-  H1_1994_S2919893::H1_1994_S2919893() 
-    : Analysis("H1_1994_S2919893")
-  {
-    setBeams(ELECTRON, PROTON);
-    addProjection(DISLepton(), "Lepton");
-    addProjection(DISKinematics(), "Kinematics");
-    addProjection(FinalState(), "FS");
-  }
-
-
-  void H1_1994_S2919893::analyze(const Event& event) {
-        
-    const FinalState& fs = applyProjection<FinalState>(event, "FS");
-    const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
-    const DISLepton& dl = applyProjection<DISLepton>(event,"Lepton");
-    
-    // Get the DIS kinematics
-    double x  = dk.x();
-    double w2 = dk.W2();
-    double w = sqrt(w2);
-
-    // Momentum of the scattered lepton
-    FourMomentum leptonMom = dl.out().momentum();
-    double ptel = pT(leptonMom);
-    double enel = leptonMom.E();
-    double thel = leptonMom.angle(dk.beamHadron().momentum())/degree;
-    
-    // Extract the particles other than the lepton
-    ParticleVector particles;
-    particles.reserve(fs.particles().size());
-    const GenParticle& dislepGP = dl.out().genParticle();
-    foreach (const Particle& p, fs.particles()) {
-      const GenParticle& loopGP = p.genParticle(); 
-      if (&loopGP == &dislepGP) continue;
-      particles.push_back(p);
+  /// @brief H1 energy flow and charged particle spectra
+  /// @author Peter Richardson
+  /// Based on the equivalent HZTool analysis
+  class H1_1994_S2919893 : public Analysis {
+  public:
+
+    /// Constructor
+    H1_1994_S2919893()
+      : Analysis("H1_1994_S2919893")
+    {
+      setBeams(ELECTRON, PROTON);
+      addProjection(DISLepton(), "Lepton");
+      addProjection(DISKinematics(), "Kinematics");
+      addProjection(FinalState(), "FS");
     }
     
-    // Cut on the forward energy
-    double efwd = 0.0;
-    foreach (const Particle& p, particles) {
-      double th = p.momentum().angle(dk.beamHadron().momentum())/degree;
-      if (th > 4.4 && th < 15.) {
-        efwd += p.momentum().E();
-      }
-    }
 
-    // Apply the cuts
-    // Lepton energy and angle, w2 and forward energy
-    getLog()<<Log::DEBUG<<"enel/GeV = "<<enel/GeV<<", thel = "<<thel<<", w2 = "<<w2<<", efwd/GeV = "<<efwd/GeV<<std::endl;
-    bool cut = enel/GeV > 14. && thel > 157. && thel < 172.5 && w2 >= 3000. && efwd/GeV > 0.5;
-    if (!cut) vetoEvent;
-
-    // Weight of the event
-    const double weight = event.weight();
-    // weights for x<1e-3 and x>1e-3
-    if (x < 1e-3) {
-      _wEnergy.first  += weight;
-    } else {
-      _wEnergy.second += weight;
-    }
 
-    // Boost to hadronic CM
-    const LorentzTransform hcmboost = dk.boostHCM();
-    // Loop over the particles
-    long ncharged(0);
-    for (size_t ip1 = 0; ip1 < particles.size(); ++ip1) {
-      const Particle& p = particles[ip1];
-
-      double th = p.momentum().angle(dk.beamHadron().momentum()) / degree;
-      // Boost momentum to lab
-      const FourMomentum hcmMom = hcmboost.transform(p.momentum());
-      // Angular cut
-      if (th <= 4.4) continue;
-
-      // Energy flow histogram
-      double et = fabs(Et(hcmMom));
-      double eta = -hcmMom.pseudorapidity(); 
+    /// @name Analysis methods
+    //@{
+    
+    void analyze(const Event& event) {
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
+      const DISLepton& dl = applyProjection<DISLepton>(event,"Lepton");
+      
+      // Get the DIS kinematics
+      double x  = dk.x();
+      double w2 = dk.W2();
+      double w = sqrt(w2);
+
+      // Momentum of the scattered lepton
+      FourMomentum leptonMom = dl.out().momentum();
+      double ptel = pT(leptonMom);
+      double enel = leptonMom.E();
+      double thel = leptonMom.angle(dk.beamHadron().momentum())/degree;
+      
+      // Extract the particles other than the lepton
+      ParticleVector particles;
+      particles.reserve(fs.particles().size());
+      const GenParticle& dislepGP = dl.out().genParticle();
+      foreach (const Particle& p, fs.particles()) {
+        const GenParticle& loopGP = p.genParticle(); 
+        if (&loopGP == &dislepGP) continue;
+        particles.push_back(p);
+      }
+      
+      // Cut on the forward energy
+      double efwd = 0.0;
+      foreach (const Particle& p, particles) {
+        double th = p.momentum().angle(dk.beamHadron().momentum())/degree;
+        if (th > 4.4 && th < 15.) {
+          efwd += p.momentum().E();
+        }
+      }
+      
+      // Apply the cuts
+      // Lepton energy and angle, w2 and forward energy
+      getLog() << Log::DEBUG << "enel/GeV = " << enel/GeV << ", thel = " << thel
+               << ", w2 = " << w2 << ", efwd/GeV = " << efwd/GeV << endl;
+      bool cut = enel/GeV > 14. && thel > 157. && thel < 172.5 && w2 >= 3000. && efwd/GeV > 0.5;
+      if (!cut) vetoEvent;
+      
+      // Weight of the event
+      const double weight = event.weight();
+      // weights for x<1e-3 and x>1e-3
       if (x < 1e-3) {
-        _histEnergyFlowLowX ->fill(eta, et*weight);
+        _wEnergy.first  += weight;
       } else {
-        _histEnergyFlowHighX->fill(eta, et*weight);
+        _wEnergy.second += weight;
       }
-      if (PID::threeCharge(p.pdgId()) != 0) {
-        /// @todo Use units in w comparisons... what are the units?
-        if (w > 50. && w <= 200.) {
-          double xf= -2 * hcmMom.z() / w;
-          double pt2 = pT2(hcmMom);
-          if (w > 50. && w <= 100.) {
-            _histSpectraW77 ->fill(xf, weight); 
-          } else if (w > 100. && w <= 150.) {
-            _histSpectraW122->fill(xf, weight);
-          } else if (w > 150. && w <= 200.) {
-            _histSpectraW169->fill(xf, weight);
+      
+      // Boost to hadronic CM
+      const LorentzTransform hcmboost = dk.boostHCM();
+      // Loop over the particles
+      long ncharged(0);
+      for (size_t ip1 = 0; ip1 < particles.size(); ++ip1) {
+        const Particle& p = particles[ip1];
+        
+        double th = p.momentum().angle(dk.beamHadron().momentum()) / degree;
+        // Boost momentum to lab
+        const FourMomentum hcmMom = hcmboost.transform(p.momentum());
+        // Angular cut
+        if (th <= 4.4) continue;
+        
+        // Energy flow histogram
+        double et = fabs(Et(hcmMom));
+        double eta = -hcmMom.pseudorapidity(); 
+        if (x < 1e-3) {
+          _histEnergyFlowLowX ->fill(eta, et*weight);
+        } else {
+          _histEnergyFlowHighX->fill(eta, et*weight);
+        }
+        if (PID::threeCharge(p.pdgId()) != 0) {
+          /// @todo Use units in w comparisons... what are the units?
+          if (w > 50. && w <= 200.) {
+            double xf= -2 * hcmMom.z() / w;
+            double pt2 = pT2(hcmMom);
+            if (w > 50. && w <= 100.) {
+              _histSpectraW77 ->fill(xf, weight); 
+            } else if (w > 100. && w <= 150.) {
+              _histSpectraW122->fill(xf, weight);
+            } else if (w > 150. && w <= 200.) {
+              _histSpectraW169->fill(xf, weight);
+            }
+            _histSpectraW117->fill(xf, weight);
+            /// @todo Is this profile meant to be filled with 2 weight factors?
+            _histPT2->fill(xf, pt2*weight/GeV2, weight);
+            ++ncharged;
+          }
+        }
+
+
+        // Energy-energy correlation
+        if (th <= 8.) continue;
+        double phi1 = p.momentum().azimuthalAngle(ZERO_2PI);
+        double eta1 = p.momentum().pseudorapidity();
+        double et1 = fabs(Et(p.momentum()));
+        for (size_t ip2 = ip1+1; ip2 < particles.size(); ++ip2) {
+          const Particle& p2 = particles[ip2];
+
+          //double th2 = beamAngle(p2.momentum(), order);
+          double th2 = p2.momentum().angle(dk.beamHadron().momentum()) / degree;
+          if (th2 <= 8.) continue;
+          double phi2 = p2.momentum().azimuthalAngle(ZERO_2PI);
+
+          /// @todo Use angle function
+          double deltaphi = phi1 - phi2;
+          if (fabs(deltaphi) > PI) 
+            deltaphi = fabs(fabs(deltaphi) - TWOPI);
+          double eta2 = p2.momentum().pseudorapidity();
+          double omega = sqrt(sqr(eta1-eta2) + sqr(deltaphi));
+          double et2 = fabs(Et(p2.momentum()));
+          double wt = et1*et2 / sqr(ptel) * weight;
+          if (x < 1e-3) {
+            _histEECLowX ->fill(omega, wt);
+          } else {
+            _histEECHighX->fill(omega, wt);
           }
-          _histSpectraW117->fill(xf, weight);
-          /// @todo Is this profile meant to be filled with 2 weight factors?
-          _histPT2->fill(xf, pt2*weight/GeV2, weight);
-          ++ncharged;
         }
       }
-    
 
-      // Energy-energy correlation
-      if (th <= 8.) continue;
-      double phi1 = p.momentum().azimuthalAngle(ZERO_2PI);
-      double eta1 = p.momentum().pseudorapidity();
-      double et1 = fabs(Et(p.momentum()));
-      for (size_t ip2 = ip1+1; ip2 < particles.size(); ++ip2) {
-        const Particle& p2 = particles[ip2];
-        
-        //double th2 = beamAngle(p2.momentum(), order);
-        double th2 = p2.momentum().angle(dk.beamHadron().momentum()) / degree;
-        if (th2 <= 8.) continue;
-        double phi2 = p2.momentum().azimuthalAngle(ZERO_2PI);
-
-        /// @todo Use angle function
-        double deltaphi = phi1 - phi2;
-        if (fabs(deltaphi) > PI) 
-          deltaphi = fabs(fabs(deltaphi) - TWOPI);
-        double eta2 = p2.momentum().pseudorapidity();
-        double omega = sqrt(sqr(eta1-eta2) + sqr(deltaphi));
-        double et2 = fabs(Et(p2.momentum()));
-        double wt = et1*et2 / sqr(ptel) * weight;
-        if(x < 1e-3) {
-          _histEECLowX ->fill(omega, wt);
+      // Factors for normalization
+      if (w > 50. && w <= 200.) {
+        if (w <= 100.) {
+          _w77.first  += ncharged*weight;
+          _w77.second += weight;
+        } else if (w <= 150.) {
+          _w122.first  += ncharged*weight;
+          _w122.second += weight;
         } else {
-          _histEECHighX->fill(omega,wt);
+          _w169.first  += ncharged*weight;
+          _w169.second += weight;
         }
+        _w117.first  += ncharged*weight;
+        _w117.second += weight;
       }
     }
 
-    // Factors for normalization
-    if (w > 50. && w <= 200.) {
-      if (w <= 100.) {
-        _w77.first  += ncharged*weight;
-        _w77.second += weight;
-      } else if (w <= 150.) {
-        _w122.first  += ncharged*weight;
-        _w122.second += weight;
-      } else {
-        _w169.first  += ncharged*weight;
-        _w169.second += weight;
-      }
-      _w117.first  += ncharged*weight;
-      _w117.second += weight;
+
+
+    void init() {
+      _w77  = make_pair(0.0, 0.0);
+      _w122 = make_pair(0.0, 0.0);
+      _w169 = make_pair(0.0, 0.0);
+      _w117 = make_pair(0.0, 0.0);
+      _wEnergy = make_pair(0.0, 0.0);
+
+      /// @todo What is "N"?
+      _histEnergyFlowLowX =  bookHistogram1D(1, 1, 1);
+      _histEnergyFlowHighX = bookHistogram1D(1, 1, 2);
+
+      _histEECLowX = bookHistogram1D(2, 1, 1);
+      _histEECHighX = bookHistogram1D(2, 1, 2);
+
+      /// @todo Add cross-section units to label
+      _histSpectraW77 = bookHistogram1D(3, 1, 1);
+      _histSpectraW122 = bookHistogram1D(3, 1, 2);
+      _histSpectraW169 = bookHistogram1D(3, 1, 3);
+      _histSpectraW117 = bookHistogram1D(3, 1, 4);
+
+      _histPT2 = bookProfile1D(4, 1, 1);
     }
-  }
 
 
-  void H1_1994_S2919893::init() {
-    _w77  = make_pair(0.0, 0.0);
-    _w122 = make_pair(0.0, 0.0);
-    _w169 = make_pair(0.0, 0.0);
-    _w117 = make_pair(0.0, 0.0);
-    _wEnergy = make_pair(0.0, 0.0);
-
-    /// @todo What is "N"?
-    _histEnergyFlowLowX =  bookHistogram1D(1, 1, 1);
-    _histEnergyFlowHighX = bookHistogram1D(1, 1, 2);
-
-    _histEECLowX = bookHistogram1D(2, 1, 1);
-    _histEECHighX = bookHistogram1D(2, 1, 2);
-
-    /// @todo Add cross-section units to label
-    _histSpectraW77 = bookHistogram1D(3, 1, 1);
-    _histSpectraW122 = bookHistogram1D(3, 1, 2);
-    _histSpectraW169 = bookHistogram1D(3, 1, 3);
-    _histSpectraW117 = bookHistogram1D(3, 1, 4);
-
-    _histPT2 = bookProfile1D(4, 1, 1);
-  }
-  
-  
-  // Finalize
-  void H1_1994_S2919893::finalize() { 
-    // Normalize inclusive single particle distributions to the average number 
-    // of charged particles per event.
-    double avgNumParts = _w77.first/_w77.second;
-    normalize(_histSpectraW77, avgNumParts);
-
-    avgNumParts = _w122.first/_w122.second;
-    normalize(_histSpectraW122, avgNumParts);
-
-    avgNumParts = _w169.first/_w169.second;
-    normalize(_histSpectraW169, avgNumParts);
-
-    avgNumParts = _w117.first/_w117.second;
-    normalize(_histSpectraW117, avgNumParts);
-
-    scale(_histEnergyFlowLowX , 1./_wEnergy.first );
-    scale(_histEnergyFlowHighX, 1./_wEnergy.second);
-
-    scale(_histEECLowX , 1./_wEnergy.first );
-    scale(_histEECHighX, 1./_wEnergy.second); 
-  }
+    /// Finalize
+    void finalize() { 
+      // Normalize inclusive single particle distributions to the average number 
+      // of charged particles per event.
+      double avgNumParts = _w77.first/_w77.second;
+      normalize(_histSpectraW77, avgNumParts);
+
+      avgNumParts = _w122.first/_w122.second;
+      normalize(_histSpectraW122, avgNumParts);
+
+      avgNumParts = _w169.first/_w169.second;
+      normalize(_histSpectraW169, avgNumParts);
+
+      avgNumParts = _w117.first/_w117.second;
+      normalize(_histSpectraW117, avgNumParts);
+
+      scale(_histEnergyFlowLowX , 1./_wEnergy.first );
+      scale(_histEnergyFlowHighX, 1./_wEnergy.second);
+
+      scale(_histEECLowX , 1./_wEnergy.first );
+      scale(_histEECHighX, 1./_wEnergy.second); 
+    }
+
+
+    //@}
+
+
+  private:
+
+    /**
+     * Polar angle, in degrees, with respect to the positive beam direction
+     */
+    inline double beamAngle(const FourVector& v, const bool& order) {
+      double thel = v.polarAngle()/degree;
+      if (thel < 0.) thel += 180.;
+      if (!order) thel = 180. - thel;
+      return thel;
+    }
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histEnergyFlowLowX;
+    AIDA::IHistogram1D *_histEnergyFlowHighX;
+    AIDA::IHistogram1D *_histEECLowX;
+    AIDA::IHistogram1D *_histEECHighX;
+    AIDA::IHistogram1D *_histSpectraW77;
+    AIDA::IHistogram1D *_histSpectraW122;
+    AIDA::IHistogram1D *_histSpectraW169;
+    AIDA::IHistogram1D *_histSpectraW117;
+    AIDA::IProfile1D *_histPT2;
+    //@}
+
+    /// @name storage of weight to calculate averages for normalisation
+    //@{
+    pair<double,double> _w77,_w122,_w169,_w117,_wEnergy;
+    //@}
+  };
 
 
 

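The "@todo Use angle function" in the energy-energy correlation loop above
refers to the by-hand folding of phi1 - phi2 into [0, pi]. A minimal sketch of
the helper it is asking for -- foldDeltaPhi is a hypothetical name, not an
existing Rivet function; PI and TWOPI are the constants already in use above:

  // Fold an azimuthal difference into [0, pi]; equivalent to the manual
  // deltaphi manipulation in the EEC loop.
  inline double foldDeltaPhi(double phi1, double phi2) {
    double dphi = fabs(phi1 - phi2);     // in [0, 2*pi) for phis in [0, 2*pi)
    if (dphi > PI) dphi = TWOPI - dphi;  // take the shorter way around
    return dphi;
  }

The omega variable would then read
omega = sqrt(sqr(eta1 - eta2) + sqr(foldDeltaPhi(phi1, phi2))).
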
Modified: trunk/src/Analyses/H1_1995_S3167097.cc
==============================================================================
--- trunk/src/Analyses/H1_1995_S3167097.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/H1_1995_S3167097.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,121 +1,153 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/H1_1995_S3167097.hh"
 #include "Rivet/Projections/FinalStateHCM.hh"
 #include "Rivet/Projections/CentralEtHCM.hh"
 
 namespace Rivet {
 
 
-  const double H1_1995_S3167097::_xmin = -6.0;
-  const double H1_1995_S3167097::_xmax = 6.0;
-
-
-  H1_1995_S3167097::H1_1995_S3167097() 
-    : Analysis("H1_1995_S3167097")
-  { 
-    setBeams(ELECTRON, PROTON);
-    const DISKinematics& diskin = addProjection(DISKinematics(), "Kinematics");
-    const FinalStateHCM& fshcm = addProjection(FinalStateHCM(diskin), "FS");
-    addProjection(CentralEtHCM(fshcm), "Y1HCM");
-    //addCut("x", MORE_EQ, _xmin);
-    //addCut("x", LESS_EQ, _xmax);
-  }
-
-
-  void H1_1995_S3167097::init() {
-    _hEtFlow = vector<AIDA::IHistogram1D *>(_nbin);
-    _hEtFlowStat = vector<AIDA::IHistogram1D *>(_nbin);
-    _nev = vector<double>(_nbin);
-    /// @todo Automate this sort of thing so that the analysis code is more readable.
-    for (size_t i = 0; i < _nbin; ++i) {
-      string istr(1, char('1' + i));
-      _hEtFlow[i] = bookHistogram1D(istr, _nb, _xmin, _xmax);
-      _hEtFlowStat[i] = bookHistogram1D(istr, _nb, _xmin, _xmax);
-    }
-    _hAvEt = bookHistogram1D("21tmp", _nbin, 1.0, 10.0);
-    _hAvX  = bookHistogram1D("22tmp", _nbin, 1.0, 10.0);
-    _hAvQ2 = bookHistogram1D("23tmp", _nbin, 1.0, 10.0);
-    _hN    = bookHistogram1D("24", _nbin, 1.0, 10.0);
-  }
-
-
-  /// Calculate the bin number from the DISKinematics projection  
-  int _getbin(const DISKinematics& dk) {
-    if ( dk.Q2() > 5.0*GeV2 && dk.Q2() <= 10.0*GeV2 ) {
-      if ( dk.x() > 0.0001 && dk.x() <= 0.0002 )
-        return 0;
-      else if ( dk.x() > 0.0002 && dk.x() <= 0.0005 && dk.Q2() > 6.0*GeV2 )
-        return 1;
-    }
-    else if ( dk.Q2() > 10.0*GeV2 && dk.Q2() <= 20.0*GeV2 ){
-      if ( dk.x() > 0.0002 && dk.x() <= 0.0005 )
-        return 2;
-      else if ( dk.x() > 0.0005 && dk.x() <= 0.0008 )
-        return 3;
-      else if ( dk.x() > 0.0008 && dk.x() <= 0.0015 )
-        return 4;
-      else if ( dk.x() > 0.0015 && dk.x() <= 0.0040 )
-        return 5;
-    }
-    else if ( dk.Q2() > 20.0*GeV2 && dk.Q2() <= 50.0*GeV2 ){
-      if ( dk.x() > 0.0005 && dk.x() <= 0.0014 )
-        return 6;
-      else if ( dk.x() > 0.0014 && dk.x() <= 0.0030 )
-        return 7;
-      else if ( dk.x() > 0.0030 && dk.x() <= 0.0100 )
-        return 8;
-    }
-    return -1;
-  }
+  /// @brief Measures energy flow in DIS? To be checked!
+  /// @todo Check this analysis!
+  /// @author Leif Lonnblad
+  class H1_1995_S3167097 : public Analysis {
+  public:
+
+    /// Constructor
+    H1_1995_S3167097() 
+      : Analysis("H1_1995_S3167097")
+    { 
+      setBeams(ELECTRON, PROTON);
+      const DISKinematics& diskin = addProjection(DISKinematics(), "Kinematics");
+      const FinalStateHCM& fshcm = addProjection(FinalStateHCM(diskin), "FS");
+      addProjection(CentralEtHCM(fshcm), "Y1HCM");
+      //addCut("x", MORE_EQ, _xmin);
+      //addCut("x", LESS_EQ, _xmax);
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    void init() {
+      _hEtFlow = vector<AIDA::IHistogram1D *>(_nbin);
+      _hEtFlowStat = vector<AIDA::IHistogram1D *>(_nbin);
+      _nev = vector<double>(_nbin);
+      /// @todo Automate this sort of thing so that the analysis code is more readable.
+      for (size_t i = 0; i < _nbin; ++i) {
+        string istr(1, char('1' + i));
+        _hEtFlow[i] = bookHistogram1D(istr, _nb, _xmin, _xmax);
+        _hEtFlowStat[i] = bookHistogram1D(istr, _nb, _xmin, _xmax);
+      }
+      _hAvEt = bookHistogram1D("21tmp", _nbin, 1.0, 10.0);
+      _hAvX  = bookHistogram1D("22tmp", _nbin, 1.0, 10.0);
+      _hAvQ2 = bookHistogram1D("23tmp", _nbin, 1.0, 10.0);
+      _hN    = bookHistogram1D("24", _nbin, 1.0, 10.0);
+    }
+    
+    
+    /// Calculate the bin number from the DISKinematics projection  
+    int _getbin(const DISKinematics& dk) {
+      if ( dk.Q2() > 5.0*GeV2 && dk.Q2() <= 10.0*GeV2 ) {
+        if ( dk.x() > 0.0001 && dk.x() <= 0.0002 )
+          return 0;
+        else if ( dk.x() > 0.0002 && dk.x() <= 0.0005 && dk.Q2() > 6.0*GeV2 )
+          return 1;
+      }
+      else if ( dk.Q2() > 10.0*GeV2 && dk.Q2() <= 20.0*GeV2 ){
+        if ( dk.x() > 0.0002 && dk.x() <= 0.0005 )
+          return 2;
+        else if ( dk.x() > 0.0005 && dk.x() <= 0.0008 )
+          return 3;
+        else if ( dk.x() > 0.0008 && dk.x() <= 0.0015 )
+          return 4;
+        else if ( dk.x() > 0.0015 && dk.x() <= 0.0040 )
+          return 5;
+      }
+      else if ( dk.Q2() > 20.0*GeV2 && dk.Q2() <= 50.0*GeV2 ){
+        if ( dk.x() > 0.0005 && dk.x() <= 0.0014 )
+          return 6;
+        else if ( dk.x() > 0.0014 && dk.x() <= 0.0030 )
+          return 7;
+        else if ( dk.x() > 0.0030 && dk.x() <= 0.0100 )
+          return 8;
+      }
+      return -1;
+    }
+    
+    
+    void analyze(const Event& event) {
+      const FinalStateHCM& fs = applyProjection<FinalStateHCM>(event, "FS");
+      const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
+      const CentralEtHCM y1 = applyProjection<CentralEtHCM>(event, "Y1HCM");
+      
+      const int ibin = _getbin(dk);
+      if (ibin < 0) vetoEvent;
+      const double weight = event.weight();
+      
+      for (size_t i = 0, N = fs.particles().size(); i < N; ++i) {
+        const double rap = fs.particles()[i].momentum().rapidity();
+        const double et = fs.particles()[i].momentum().Et();
+        _hEtFlow[ibin]->fill(rap, weight * et/GeV);
+        _hEtFlowStat[ibin]->fill(rap, weight * et/GeV);
+      }
+      
+      _nev[ibin] += weight;
+      _hAvEt->fill(ibin + 1.5, weight * y1.sumEt()/GeV);
+      _hAvX->fill(ibin + 1.5, weight * dk.x());
+      _hAvQ2->fill(ibin + 1.5, weight * dk.Q2()/GeV2);
+      _hN->fill(ibin + 1.5, weight);
+    }
+    
+    
+    void finalize() {
+      for (size_t ibin = 0; ibin < _nbin; ++ibin) {
+        _hEtFlow[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
+        _hEtFlowStat[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
+      }
+      
+      /// @todo Automate this sort of thing so that the analysis code is more readable.
+      AIDA::IDataPointSet* h = 0;
+      h = histogramFactory().divide("/H1_1995_S3167097/21", *_hAvEt, *_hN);
+      h->setTitle(_hAvEt->title());
+      histogramFactory().destroy(_hAvEt);
+      
+      h = histogramFactory().divide("/H1_1995_S3167097/22", *_hAvX, *_hN);
+      h->setTitle(_hAvX->title());
+      histogramFactory().destroy(_hAvX);
+      
+      h = histogramFactory().divide("/H1_1995_S3167097/23", *_hAvQ2, *_hN);
+      h->setTitle(_hAvQ2->title());
+      histogramFactory().destroy(_hAvQ2);
+    }
+    
+    //@}
+
+    
+  private:
+
+    /// Some integer constants used.
+    /// @todo Remove statics!
+    static const size_t _nb = 24, _nbin = 9;
+    
+    /// Some double constants used.
+    /// @todo Remove statics!
+    static const double _xmin, _xmax;
+
+    /// Histograms for the \f$ E_T \f$ flows
+    vector<AIDA::IHistogram1D*> _hEtFlow, _hEtFlowStat;
+
+    /// Histograms for averages in different kinematical bins.
+    AIDA::IHistogram1D *_hAvEt, *_hAvX, *_hAvQ2, *_hN;
+
+    /// Per-bin sums of event weights, used for normalisation
+    vector<double> _nev;
+  };
 
 
-  void H1_1995_S3167097::analyze(const Event& event) {
-    const FinalStateHCM& fs = applyProjection<FinalStateHCM>(event, "FS");
-    const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
-    const CentralEtHCM y1 = applyProjection<CentralEtHCM>(event, "Y1HCM");
-
-    const int ibin = _getbin(dk);
-    if (ibin < 0) vetoEvent;
-    const double weight = event.weight();
-
-    for (size_t i = 0, N = fs.particles().size(); i < N; ++i) {
-      const double rap = fs.particles()[i].momentum().rapidity();
-      const double et = fs.particles()[i].momentum().Et();
-      _hEtFlow[ibin]->fill(rap, weight * et/GeV);
-      _hEtFlowStat[ibin]->fill(rap, weight * et/GeV);
-    }
-
-    _nev[ibin] += weight;
-    _hAvEt->fill(ibin + 1.5, weight * y1.sumEt()/GeV);
-    _hAvX->fill(ibin + 1.5, weight * dk.x());
-    _hAvQ2->fill(ibin + 1.5, weight * dk.Q2()/GeV2);
-    _hN->fill(ibin + 1.5, weight);
-  }
-
-
-  void H1_1995_S3167097::finalize() {
-    for (size_t ibin = 0; ibin < _nbin; ++ibin) {
-      _hEtFlow[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
-      _hEtFlowStat[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
-    }
-
-    /// @todo Automate this sort of thing so that the analysis code is more readable.
-    AIDA::IDataPointSet* h = 0;
-    h = histogramFactory().divide("/H1_1995_S3167097/21", *_hAvEt, *_hN);
-    h->setTitle(_hAvEt->title());
-    histogramFactory().destroy(_hAvEt);
-
-    h = histogramFactory().divide("/H1_1995_S3167097/22", *_hAvX, *_hN);
-    h->setTitle(_hAvX->title());
-    histogramFactory().destroy(_hAvX);
-
-    h = histogramFactory().divide("/H1_1995_S3167097/23", *_hAvQ2, *_hN);
-    h->setTitle(_hAvQ2->title());
-    histogramFactory().destroy(_hAvQ2);
-  }
-
+  // Init statics
+  const double H1_1995_S3167097::_xmin = -6.0;
+  const double H1_1995_S3167097::_xmax = 6.0;
 
 
   // This global object acts as a hook for the plugin system

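The "@todo Automate this sort of thing" next to _getbin is well taken: the
(Q2, x) windows could live in data rather than in control flow. A sketch of a
table-driven equivalent, assuming the bin edges stay exactly as coded above
(Q2 edges in GeV^2, with bin 1 keeping its tighter Q2 > 6 requirement):

  // One row per analysis bin; edges copied from the _getbin cascade.
  struct KinBin { double q2lo, q2hi, xlo, xhi; };

  static const KinBin BIN_EDGES[9] = {
    {  5.0, 10.0, 0.0001, 0.0002 },  // bin 0
    {  6.0, 10.0, 0.0002, 0.0005 },  // bin 1
    { 10.0, 20.0, 0.0002, 0.0005 },  // bin 2
    { 10.0, 20.0, 0.0005, 0.0008 },  // bin 3
    { 10.0, 20.0, 0.0008, 0.0015 },  // bin 4
    { 10.0, 20.0, 0.0015, 0.0040 },  // bin 5
    { 20.0, 50.0, 0.0005, 0.0014 },  // bin 6
    { 20.0, 50.0, 0.0014, 0.0030 },  // bin 7
    { 20.0, 50.0, 0.0030, 0.0100 },  // bin 8
  };

  // Return the bin index for (q2 in GeV2, x), or -1 if nothing matches.
  inline int getBinIndex(double q2, double x) {
    for (int i = 0; i < 9; ++i) {
      const KinBin& b = BIN_EDGES[i];
      if (q2 > b.q2lo && q2 <= b.q2hi && x > b.xlo && x <= b.xhi) return i;
    }
    return -1;
  }

_getbin(dk) would then reduce to getBinIndex(dk.Q2()/GeV2, dk.x()), and the
same pattern would tame the much longer cascades in H1_2000_S4129130 below.
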
Modified: trunk/src/Analyses/H1_2000_S4129130.cc
==============================================================================
--- trunk/src/Analyses/H1_2000_S4129130.cc	Mon Aug 31 12:51:00 2009	(r1791)
+++ trunk/src/Analyses/H1_2000_S4129130.cc	Mon Aug 31 15:12:13 2009	(r1792)
@@ -1,7 +1,6 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh" 
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/H1_2000_S4129130.hh"
 #include "Rivet/Math/Constants.hh"
 #include "Rivet/Tools/ParticleIDMethods.hh"
 #include "Rivet/Projections/FinalState.hh"
@@ -10,251 +9,293 @@
 namespace Rivet {
 
 
-  // Constructor
-  H1_2000_S4129130::H1_2000_S4129130() 
-    : Analysis("H1_2000_S4129130")
-  {
-    setBeams(ELECTRON, PROTON);
-    addProjection(DISLepton(), "Lepton");
-    addProjection(DISKinematics(), "Kinematics");
-    addProjection(FinalState(), "FS");
-  }
-
-
-  void H1_2000_S4129130::analyze(const Event& event) {
-    // Get the projections
-    const FinalState & fs = applyProjection<FinalState>(event, "FS");
-    const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
-    const DISLepton    & dl = applyProjection<DISLepton>(event,"Lepton");
-
-    // Get the DIS kinematics
-    double q2  = dk.Q2();
-    double x   = dk.x();
-    double y   = dk.y();
-    double w2  = dk.W2();
-
-    // Momentum of the scattered lepton
-    FourMomentum leptonMom = dl.out().momentum();
-    // pT energy and angle
-    double enel = leptonMom.E();
-    double thel = 180.-leptonMom.angle(dl.in().momentum())/degree;
-
-    // Extract the particles other than the lepton
-    ParticleVector particles;
-    particles.reserve(fs.particles().size());
-    const GenParticle& dislepGP = dl.out().genParticle();
-    for (ParticleVector::const_iterator p = fs.particles().begin();
-         p != fs.particles().end(); ++p) {
-      const GenParticle& loopGP = p->genParticle(); 
-      if (&loopGP == &dislepGP) continue;
-      particles.push_back(*p);
-    }
-
-    // Cut on the forward energy
-    double efwd = 0.;
-    foreach (const Particle& p, particles) {
-      double th = 180.-p.momentum().angle(dl.in().momentum())/degree;
-//      double th = beamAngle(p.momentum(),order);
-      if(th > 4.4 && th < 15.0) efwd += p.momentum().E();
-    }
-    // There are four possible selections for events
-    bool evcut[4];
-    // Low  Q2 selection a
-    /// @todo Units and inRange
-    evcut[0] = enel/GeV > 12. && w2 >= 4400. && efwd/GeV > 0.5 && 
-               thel > 157. && thel < 176.0;
-    // Low  Q2 selection b
-    /// @todo Units and inRange
-    evcut[1] = enel/GeV > 12. && y > 0.3 && y < 0.5;
-    // High Q2 selection a
-    /// @todo Units and inRange
-    evcut[2] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 && 
-               w2 >= 4400. && efwd > 0.5;
-    // High Q2 selection b
-    /// @todo Units and inRange
-    evcut[3] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
-               w2 > 27110. && w2 < 45182.;
-
-    // Veto if fails all cuts
-    if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) {
-      vetoEvent;
-    }
-
-    // Find the bins
-    int bin[4] = {-1,-1,-1,-1};
-    // For the low Q2 selection a)
-    /// @todo Units
-    if (q2 > 2.5 && q2 <= 5.) {
-      if (x > 0.00005 && x <= 0.0001 ) bin[0] = 0;
-      if (x > 0.0001  && x <= 0.0002 ) bin[0] = 1;
-      if (x > 0.0002  && x <= 0.00035) bin[0] = 2;
-      if (x > 0.00035 && x <= 0.0010 ) bin[0] = 3;
-    }
-    /// @todo Units
-    else if(q2 > 5. && q2 <= 10.) {
-      if (x > 0.0001  && x <= 0.0002 ) bin[0] = 4;
-      if (x > 0.0002  && x <= 0.00035) bin[0] = 5;
-      if (x > 0.00035 && x <= 0.0007 ) bin[0] = 6;
-      if (x > 0.0007  && x <= 0.0020 ) bin[0] = 7;
-    }
-    /// @todo Units
-    else if(q2 > 10. && q2 <= 20.) {
-      if (x > 0.0002 && x <= 0.0005) bin[0] = 8;
-      if (x > 0.0005 && x <= 0.0008) bin[0] = 9;
-      if (x > 0.0008 && x <= 0.0015) bin[0] = 10;
-      if (x > 0.0015 && x <= 0.040 ) bin[0] = 11;
-    }
-    /// @todo Units
-    else if(q2 > 20. && q2 <= 50.) {
-      if (x > 0.0005 && x <= 0.0014) bin[0] = 12;
-      if (x > 0.0014 && x <= 0.0030) bin[0] = 13;
-      if (x > 0.0030 && x <= 0.0100) bin[0] = 14;
-    }
-    /// @todo Units
-    else if (q2 > 50. && q2 <= 100.) {
-      if (x >0.0008 && x <= 0.0030) bin[0] = 15;
-      if (x >0.0030 && x <= 0.0200) bin[0] = 16;
-    }
-    /// @todo Huh?
-    evcut[0] &= bin[0] >= 0;
-    // For the low Q2 selection b)
-    if (q2 > 2.5 && q2 <= 5.  ) bin[1] = 0;
-    if (q2 > 5.  && q2 <= 10. ) bin[1] = 1;
-    if (q2 > 10. && q2 <= 20. ) bin[1] = 2;
-    if (q2 > 20. && q2 <= 50. ) bin[1] = 3;
-    if (q2 > 50. && q2 <= 100.) bin[1] = 4;
-    evcut[1] &= bin[1] >= 0;
-    // for the high Q2 selection a)
-    /// @todo Units
-    if (q2 > 100. && q2 <= 400.) {
-      if (x > 0.00251 && x <= 0.00631) bin[2] = 0;
-      if (x > 0.00631 && x <= 0.0158 ) bin[2] = 1;
-      if (x > 0.0158  && x <= 0.0398 ) bin[2] = 2;
-    }
-    /// @todo Units
-    else if (q2 > 400 && q2 <= 1100.) {
-      if (x > 0.00631 && x <= 0.0158 ) bin[2] = 3;
-      if (x > 0.0158  && x <= 0.0398 ) bin[2] = 4;
-      if (x > 0.0398  && x <= 1.     ) bin[2] = 5;
-    }
-    /// @todo Units
-    else if (q2 > 1100. && q2 <= 100000.) {
-      if (x > 0. && x <= 1.) bin[2] = 6;
-    }
-    evcut[2] &= bin[2] >= 0;
-    // for the high Q2 selection b)
-    /// @todo Units
-    if      (q2 > 100. && q2 <= 220.) bin[3] = 0;
-    else if (q2 > 220. && q2 <= 400.) bin[3] = 1;
-    else if (q2 > 400.              ) bin[3] = 2;
-    evcut[3] &= bin[3] >= 0;
-
-    // Veto if fails all cuts after bin selection
-    if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]));
-
-    // Increment the count for normalisation
-    const double weight = event.weight();
-    if (evcut[0]) _weightETLowQa [bin[0]] += weight;
-    if (evcut[1]) _weightETLowQb [bin[1]] += weight;
-    if (evcut[2]) _weightETHighQa[bin[2]] += weight;
-    if (evcut[3]) _weightETHighQb[bin[3]] += weight;
-
-    // Boost to hadronicCM
-    const LorentzTransform hcmboost = dk.boostHCM();
-
-    // Loop over the particles
-    double etcent = 0;
-    double etfrag = 0;
-    foreach (const Particle& p, particles) {
-      // Boost momentum to CMS
-      const FourMomentum hcmMom = hcmboost.transform(p.momentum());
-      double et = fabs(Et(hcmMom));
-      double eta = -hcmMom.pseudorapidity();
-      // Averages in central and forward region
-      if (fabs(eta) < .5 ) etcent += et;
-      if (eta > 2 && eta <= 3.) etfrag += et;
-      // Histograms of Et flow
-      if (evcut[0]) _histETLowQa [bin[0]]->fill(eta, et*weight);
-      if (evcut[1]) _histETLowQb [bin[1]]->fill(eta, et*weight);
-      if (evcut[2]) _histETHighQa[bin[2]]->fill(eta, et*weight);
-      if (evcut[3]) _histETHighQb[bin[3]]->fill(eta, et*weight);
-    }
-    // Fill histograms for the average quantities
-    if (evcut[1] || evcut[3]) {
-      _histAverETCentral->fill(q2, etcent*weight,weight);
-      _histAverETFrag   ->fill(q2, etfrag*weight,weight);
-    }
-  }
-
-
-  void H1_2000_S4129130::init() {
-
-    string t = "Transverse energy flow for ";
-    IHistogram1D* h = 0;
-
-    /// @todo What is "N"?
-
-    const string xt = "\\langle x \\rangle";
-    const string Q2t = "\\langle Q^2 \\rangle";
-
-    // Histograms and weight vectors for low Q^2 a
-    _histETLowQa.reserve(17);
-    _weightETLowQa.reserve(17);
-    for (size_t ix = 0; ix < 17; ++ix) {
-      h = bookHistogram1D(ix+1, 1, 1);
-      _histETLowQa.push_back(h);
-      _weightETLowQa.push_back(0.);
-    }
-
-    // Histograms and weight vectors for high Q^2 a
-    _histETHighQa.reserve(7);
-    _weightETHighQa.reserve(7);
-    for (size_t ix = 0; ix < 7; ++ix) {
-      h = bookHistogram1D(ix+18, 1, 1);
-      _histETHighQa.push_back(h);
-      _weightETHighQa.push_back(0.);
-    }
-
-    // Histograms and weight vectors for low Q^2 b
-    _histETLowQb.reserve(5);
-    _weightETLowQb.reserve(5);
-    for (size_t ix = 0; ix < 5; ++ix) {
-      h = bookHistogram1D(ix+25, 1, 1);
-      _histETLowQb.push_back(h);
-      _weightETLowQb.push_back(0.);
-    }
-
-    // Histograms and weight vectors for high Q^2 b
-    _histETHighQb.reserve(3);
-    _weightETHighQb.reserve(3);
-    for (size_t ix = 0; ix < 3; ++ix) {
-      h = bookHistogram1D(30+ix, 1, 1);
-      _histETHighQb.push_back(h);
-      _weightETHighQb.push_back(0.0);
-    }
-
-    // Histograms for the averages
-    _histAverETCentral = bookProfile1D(33,  1, 1);
-    _histAverETFrag = bookProfile1D(34,  1, 1);
-  }
-
-
-  // Finalize
-  void H1_2000_S4129130::finalize() { 
-    // Normalization of the Et distributions
-    for (size_t ix=0; ix<17; ++ix) {
-      scale(_histETLowQa[ix], 1./_weightETLowQa[ix]);
-    }
-    for(size_t ix=0; ix<7; ++ix) {
-      scale(_histETHighQa[ix], 1./_weightETHighQa[ix]);
-    }
-    for(size_t ix=0; ix<5; ++ix) {
-      scale(_histETLowQb[ix], 1./_weightETLowQb[ix]);
-    }
-    for(size_t ix=0; ix<3; ++ix) {
-      scale(_histETHighQb[ix], 1./_weightETHighQb[ix]);
-    }
-  }
+  /// @brief H1 energy flow and charged particle spectra
+  /// @author Peter Richardson
+    /// Based on the equivalent HZTool analysis
+  class H1_2000_S4129130 : public Analysis {
+  public:
+
+    /// Constructor
+    H1_2000_S4129130()
+      : Analysis("H1_2000_S4129130")
+    {
+      setBeams(ELECTRON, PROTON);
+      addProjection(DISLepton(), "Lepton");
+      addProjection(DISKinematics(), "Kinematics");
+      addProjection(FinalState(), "FS");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    void analyze(const Event& event) {
+      // Get the projections
+      const FinalState & fs = applyProjection<FinalState>(event, "FS");
+      const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
+      const DISLepton    & dl = applyProjection<DISLepton>(event,"Lepton");
+
+      // Get the DIS kinematics
+      double q2  = dk.Q2();
+      double x   = dk.x();
+      double y   = dk.y();
+      double w2  = dk.W2();
+      
+      // Momentum of the scattered lepton
+      FourMomentum leptonMom = dl.out().momentum();
+      // Energy and polar angle of the scattered lepton
+      double enel = leptonMom.E();
+      double thel = 180.-leptonMom.angle(dl.in().momentum())/degree;
+
+      // Extract the particles other than the lepton
+      ParticleVector particles;
+      particles.reserve(fs.particles().size());
+      const GenParticle& dislepGP = dl.out().genParticle();
+      for (ParticleVector::const_iterator p = fs.particles().begin();
+           p != fs.particles().end(); ++p) {
+        const GenParticle& loopGP = p->genParticle(); 
+        if (&loopGP == &dislepGP) continue;
+        particles.push_back(*p);
+      }
+      
+      // Cut on the forward energy
+      double efwd = 0.;
+      foreach (const Particle& p, particles) {
+        double th = 180.-p.momentum().angle(dl.in().momentum())/degree;
+        //      double th = beamAngle(p.momentum(),order);
+        if (th > 4.4 && th < 15.0) efwd += p.momentum().E();
+      }
+      // There are four possible selections for events
+      bool evcut[4];
+      // Low  Q2 selection a
+      /// @todo Units and inRange
+      evcut[0] = enel/GeV > 12. && w2 >= 4400. && efwd/GeV > 0.5 && 
+        thel > 157. && thel < 176.0;
+      // Low  Q2 selection b
+      /// @todo Units and inRange
+      evcut[1] = enel/GeV > 12. && y > 0.3 && y < 0.5;
+      // High Q2 selection a
+      /// @todo Units and inRange
+      evcut[2] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 && 
+        w2 >= 4400. && efwd > 0.5;
+      // High Q2 selection b
+      /// @todo Units and inRange
+      evcut[3] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
+        w2 > 27110. && w2 < 45182.;
+      
+      // Veto if fails all cuts
+      if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) {
+        vetoEvent;
+      }
+      
+      // Find the bins
+      int bin[4] = {-1,-1,-1,-1};
+      // For the low Q2 selection a)
+      /// @todo Units
+      if (q2 > 2.5 && q2 <= 5.) {
+        if (x > 0.00005 && x <= 0.0001 ) bin[0] = 0;
+        if (x > 0.0001  && x <= 0.0002 ) bin[0] = 1;
+        if (x > 0.0002  && x <= 0.00035) bin[0] = 2;
+        if (x > 0.00035 && x <= 0.0010 ) bin[0] = 3;
+      }
+      /// @todo Units
+      else if(q2 > 5. && q2 <= 10.) {
+        if (x > 0.0001  && x <= 0.0002 ) bin[0] = 4;
+        if (x > 0.0002  && x <= 0.00035) bin[0] = 5;
+        if (x > 0.00035 && x <= 0.0007 ) bin[0] = 6;
+        if (x > 0.0007  && x <= 0.0020 ) bin[0] = 7;
+      }
+      /// @todo Units
+      else if(q2 > 10. && q2 <= 20.) {
+        if (x > 0.0002 && x <= 0.0005) bin[0] = 8;
+        if (x > 0.0005 && x <= 0.0008) bin[0] = 9;
+        if (x > 0.0008 && x <= 0.0015) bin[0] = 10;
+        if (x > 0.0015 && x <= 0.040 ) bin[0] = 11;
+      }
+      /// @todo Units
+      else if(q2 > 20. && q2 <= 50.) {
+        if (x > 0.0005 && x <= 0.0014) bin[0] = 12;
+        if (x > 0.0014 && x <= 0.0030) bin[0] = 13;
+        if (x > 0.0030 && x <= 0.0100) bin[0] = 14;
+      }
+      /// @todo Units
+      else if (q2 > 50. && q2 <= 100.) {
+        if (x >0.0008 && x <= 0.0030) bin[0] = 15;
+        if (x >0.0030 && x <= 0.0200) bin[0] = 16;
+      }
+      /// @todo Huh?
+      evcut[0] &= bin[0] >= 0;
+      // For the low Q2 selection b)
+      if (q2 > 2.5 && q2 <= 5.  ) bin[1] = 0;
+      if (q2 > 5.  && q2 <= 10. ) bin[1] = 1;
+      if (q2 > 10. && q2 <= 20. ) bin[1] = 2;
+      if (q2 > 20. && q2 <= 50. ) bin[1] = 3;
+      if (q2 > 50. && q2 <= 100.) bin[1] = 4;
+      evcut[1] &= bin[1] >= 0;
+      // for the high Q2 selection a)
+      /// @todo Units
+      if (q2 > 100. && q2 <= 400.) {
+        if (x > 0.00251 && x <= 0.00631) bin[2] = 0;
+        if (x > 0.00631 && x <= 0.0158 ) bin[2] = 1;
+        if (x > 0.0158  && x <= 0.0398 ) bin[2] = 2;
+      }
+      /// @todo Units
+      else if (q2 > 400 && q2 <= 1100.) {
+        if (x > 0.00631 && x <= 0.0158 ) bin[2] = 3;
+        if (x > 0.0158  && x <= 0.0398 ) bin[2] = 4;
+        if (x > 0.0398  && x <= 1.     ) bin[2] = 5;
+      }
+      /// @todo Units
+      else if (q2 > 1100. && q2 <= 100000.) {
+        if (x > 0. && x <= 1.) bin[2] = 6;
+      }
+      evcut[2] &= bin[2] >= 0;
+      // for the high Q2 selection b)
+      /// @todo Units
+      if      (q2 > 100. && q2 <= 220.) bin[3] = 0;
+      else if (q2 > 220. && q2 <= 400.) bin[3] = 1;
+      else if (q2 > 400.              ) bin[3] = 2;
+      evcut[3] &= bin[3] >= 0;
+      
+      // Veto if fails all cuts after bin selection
+      if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3])) vetoEvent;
+      
+      // Increment the count for normalisation
+      const double weight = event.weight();
+      if (evcut[0]) _weightETLowQa [bin[0]] += weight;
+      if (evcut[1]) _weightETLowQb [bin[1]] += weight;
+      if (evcut[2]) _weightETHighQa[bin[2]] += weight;
+      if (evcut[3]) _weightETHighQb[bin[3]] += weight;
+      
+      // Boost to hadronicCM
+      const LorentzTransform hcmboost = dk.boostHCM();
+      
+      // Loop over the particles
+      double etcent = 0;
+      double etfrag = 0;
+      foreach (const Particle& p, particles) {
+        // Boost momentum to CMS
+        const FourMomentum hcmMom = hcmboost.transform(p.momentum());
+        double et = fabs(Et(hcmMom));
+        double eta = -hcmMom.pseudorapidity();
+        // Averages in central and forward region
+        if (fabs(eta) < .5 ) etcent += et;
+        if (eta > 2 && eta <= 3.) etfrag += et;
+        // Histograms of Et flow
+        if (evcut[0]) _histETLowQa [bin[0]]->fill(eta, et*weight);
+        if (evcut[1]) _histETLowQb [bin[1]]->fill(eta, et*weight);
+        if (evcut[2]) _histETHighQa[bin[2]]->fill(eta, et*weight);
+        if (evcut[3]) _histETHighQb[bin[3]]->fill(eta, et*weight);
+      }
+      // Fill histograms for the average quantities
+      if (evcut[1] || evcut[3]) {
+        _histAverETCentral->fill(q2, etcent*weight,weight);
+        _histAverETFrag   ->fill(q2, etfrag*weight,weight);
+      }
+    }
+    
+    
+    void init() {
+      
+      string t = "Transverse energy flow for ";
+      IHistogram1D* h = 0;
+      
+      /// @todo What is "N"?
+      
+      const string xt = "\\langle x \\rangle";
+      const string Q2t = "\\langle Q^2 \\rangle";
+      
+      // Histograms and weight vectors for low Q^2 a
+      _histETLowQa.reserve(17);
+      _weightETLowQa.reserve(17);
+      for (size_t ix = 0; ix < 17; ++ix) {
+        h = bookHistogram1D(ix+1, 1, 1);
+        _histETLowQa.push_back(h);
+        _weightETLowQa.push_back(0.);
+      }
+      
+      // Histograms and weight vectors for high Q^2 a
+      _histETHighQa.reserve(7);
+      _weightETHighQa.reserve(7);
+      for (size_t ix = 0; ix < 7; ++ix) {
+        h = bookHistogram1D(ix+18, 1, 1);
+        _histETHighQa.push_back(h);
+        _weightETHighQa.push_back(0.);
+      }
+      
+      // Histograms and weight vectors for low Q^2 b
+      _histETLowQb.reserve(5);
+      _weightETLowQb.reserve(5);
+      for (size_t ix = 0; ix < 5; ++ix) {
+        h = bookHistogram1D(ix+25, 1, 1);
+        _histETLowQb.push_back(h);
+        _weightETLowQb.push_back(0.);
+      }
+      
+      // Histograms and weight vectors for high Q^2 b
+      _histETHighQb.reserve(3);
+      _weightETHighQb.reserve(3);
+      for (size_t ix = 0; ix < 3; ++ix) {
+        h = bookHistogram1D(30+ix, 1, 1);
+        _histETHighQb.push_back(h);
+        _weightETHighQb.push_back(0.0);
+      }
+      
+      // Histograms for the averages
+      _histAverETCentral = bookProfile1D(33,  1, 1);
+      _histAverETFrag = bookProfile1D(34,  1, 1);
+    }
+    
+    
+    // Finalize
+    void finalize() { 
+      // Normalization of the Et distributions
+      for (size_t ix = 0; ix < 17; ++ix) {
+        scale(_histETLowQa[ix], 1./_weightETLowQa[ix]);
+      }
+      for (size_t ix = 0; ix < 7; ++ix) {
+        scale(_histETHighQa[ix], 1./_weightETHighQa[ix]);
+      }
+      for (size_t ix = 0; ix < 5; ++ix) {
+        scale(_histETLowQb[ix], 1./_weightETLowQb[ix]);
+      }
+      for (size_t ix = 0; ix < 3; ++ix) {
+        scale(_histETHighQb[ix], 1./_weightETHighQb[ix]);
+      }
+    }
+    
+
+    //@}
+
+
+  private:
+    
+    /// Polar angle, in degrees, with respect to the positive beam direction
+    inline double beamAngle(const FourVector& v, const bool& order) {
+      double thel = v.polarAngle()/degree;
+      if (thel < 0.) thel += 180.;
+      if (!order) thel = 180. - thel;
+      return thel;
+    }
+
+    /// @name Histograms
+    //@{
+    vector<AIDA::IHistogram1D *> _histETLowQa;
+    vector<AIDA::IHistogram1D *> _histETHighQa;
+    vector<AIDA::IHistogram1D *> _histETLowQb;
+    vector<AIDA::IHistogram1D *> _histETHighQb;
+    AIDA::IProfile1D * _histAverETCentral;
+    AIDA::IProfile1D * _histAverETFrag;
+    //@}
+
+    /// @name storage of weights for normalisation
+    //@{
+    vector<double> _weightETLowQa;
+    vector<double> _weightETHighQa;
+    vector<double> _weightETLowQb;
+    vector<double> _weightETHighQb;
+    //@}
+  };
 
 
 

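One caveat on the finalize() scalings above (and the analogous ones in
H1_1995_S3167097): each 1/weight factor divides by an accumulated event-weight
sum that is zero whenever a kinematic bin received no events. A defensive
variant of the first loop, as a sketch rather than a proposed commit:

  // Skip the scaling (and say so) when a bin collected no weight,
  // rather than dividing by zero.
  for (size_t ix = 0; ix < 17; ++ix) {
    if (_weightETLowQa[ix] > 0.0) {
      scale(_histETLowQa[ix], 1./_weightETLowQa[ix]);
    } else {
      getLog() << Log::WARN << "Empty low-Q2 (a) bin " << ix
               << ": leaving histogram unscaled" << endl;
    }
  }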
