[Rivet-svn] r1788 - in trunk: . include/Rivet include/Rivet/Analyses src/Analyses

blackhole at projects.hepforge.org blackhole at projects.hepforge.org
Mon Aug 31 11:42:11 BST 2009


Author: buckley
Date: Mon Aug 31 11:42:10 2009
New Revision: 1788

Log:
Removing headers for STAR and SFM analyses

Deleted:
   trunk/include/Rivet/Analyses/SFM_1984_S1178091.hh
   trunk/include/Rivet/Analyses/STAR_2006_S6870392.hh
   trunk/include/Rivet/Analyses/STAR_2008_S7993412.hh
   trunk/include/Rivet/Analyses/STAR_2009_UE_HELEN.hh
Modified:
   trunk/ChangeLog
   trunk/include/Rivet/Makefile.am
   trunk/src/Analyses/E735_1998_S3905616.cc
   trunk/src/Analyses/ExampleAnalysis.cc
   trunk/src/Analyses/ExampleTree.cc
   trunk/src/Analyses/SFM_1984_S1178091.cc
   trunk/src/Analyses/STAR_2006_S6870392.cc
   trunk/src/Analyses/STAR_2008_S7993412.cc
   trunk/src/Analyses/STAR_2009_UE_HELEN.cc

Modified: trunk/ChangeLog
==============================================================================
--- trunk/ChangeLog	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/ChangeLog	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,5 +1,7 @@
 2009-08-31  Andy Buckley  <andy at insectnation.org>
 
+	* Removing headers for STAR analyses.
+
 	* Cleaning and removing headers from UA1 and UA5 analyses.
 
 	* Removing headers for D0 analyses.

Modified: trunk/include/Rivet/Makefile.am
==============================================================================
--- trunk/include/Rivet/Makefile.am	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/include/Rivet/Makefile.am	Mon Aug 31 11:42:10 2009	(r1788)
@@ -30,8 +30,6 @@
 
 ## Standard analyses
 nobase_dist_noinst_HEADERS += \
-  Analyses/ExampleAnalysis.hh \
-  Analyses/ExampleTree.hh \
   Analyses/DELPHI_1995_S3137023.hh \
   Analyses/DELPHI_1996_S3430090.hh \
   Analyses/DELPHI_2002_069_CONF_603.hh \
@@ -49,11 +47,7 @@
   Analyses/MC_JetAnalysis.hh \
   Analyses/MC_LHC_LEADINGJETS.hh \
   Analyses/MC_LHC_ZANALYSIS.hh \
-  Analyses/MC_LHC_DIJET.hh \
-  Analyses/STAR_2006_S6870392.hh \
-  Analyses/STAR_2008_S7993412.hh \
-  Analyses/STAR_2009_UE_HELEN.hh \
-
+  Analyses/MC_LHC_DIJET.hh
 
 ## Projections
 nobase_pkginclude_HEADERS +=   \

Modified: trunk/src/Analyses/E735_1998_S3905616.cc
==============================================================================
--- trunk/src/Analyses/E735_1998_S3905616.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/E735_1998_S3905616.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,46 +1,63 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
 #include "Rivet/Tools/Logging.hh"
-#include "Rivet/Analyses/E735_1998_S3905616.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 
 namespace Rivet {
 
 
-  E735_1998_S3905616::E735_1998_S3905616()
+  class E735_1998_S3905616 : public Analysis {
+  public:
+    
+    /// Constructor
+    E735_1998_S3905616()
       : Analysis("E735_1998_S3905616") {
-    setBeams(PROTON, ANTIPROTON);
-    const ChargedFinalState cfs;
-    addProjection(cfs, "FS");
-  }
-
-
-  void E735_1998_S3905616::init() {
-    _hist_multiplicity = bookHistogram1D(1, 1, 1);
-  }
-
-
-  void E735_1998_S3905616::analyze(const Event& event) {
-    Log log = getLog();
-    const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
-    const size_t numParticles = fs.particles().size();
-
-    // Get the event weight
-    const double weight = event.weight();
-
-    // Fill histo of charged multiplicity distribution
-    _hist_multiplicity->fill(numParticles, weight);
-  }
-
-
-  void E735_1998_S3905616::finalize() {
-    normalize(_hist_multiplicity);
-  }
-
-
-
+      setBeams(PROTON, ANTIPROTON);
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+    }
+    
+
+    /// @name Analysis methods
+    //@{
+    
+    void init() {
+      _hist_multiplicity = bookHistogram1D(1, 1, 1);
+    }
+
+
+    void analyze(const Event& event) {
+      const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Get the event weight
+      const double weight = event.weight();
+      
+      // Fill histo of charged multiplicity distribution
+      _hist_multiplicity->fill(numParticles, weight);
+    }
+    
+    
+    void finalize() {
+      normalize(_hist_multiplicity);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_hist_multiplicity;
+    //@}
+    
+  };
+  
+  
+  
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<E735_1998_S3905616> plugin_E735_1998_S3905616;
-
+  
 }

Modified: trunk/src/Analyses/ExampleAnalysis.cc
==============================================================================
--- trunk/src/Analyses/ExampleAnalysis.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/ExampleAnalysis.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,7 +1,7 @@
 // -*- C++ -*-
+#include "Rivet/Analysis.hh"
 #include "Rivet/Tools/Logging.hh"
 #include "Rivet/RivetAIDA.hh"
-#include "Rivet/Analyses/ExampleAnalysis.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Projections/FastJets.hh"
@@ -10,92 +10,117 @@
 #include "Rivet/Projections/Sphericity.hh"
 
 namespace Rivet {
+  
+  
+  /// @brief Just measures a few random things as an example.
+  class ExampleAnalysis : public Analysis {
+  public:
+    
+    /// Constructor
+    ExampleAnalysis()
+      : Analysis("EXAMPLE")
+    {
+      const FinalState cnfs(-4, 4, 2*GeV);
+      const ChargedFinalState cfs(-4, 4, 2*GeV);
+      addProjection(cnfs, "FS");
+      addProjection(cfs, "CFS");
+      addProjection(FastJets(cnfs, FastJets::KT, 0.7), "Jets");
+      addProjection(Multiplicity(cfs), "CMult");
+      addProjection(Multiplicity(cnfs), "CNMult");
+      addProjection(Thrust(cfs), "Thrust");
+      addProjection(Sphericity(cfs), "Sphericity");
+    }
+    
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      // Using histogram auto-booking is preferable if there are comparison datasets in HepData.
+      // Since this is just a demo analysis, there is no associated paper!
+
+      _histTot         = bookHistogram1D("TotalMult", 100, -0.5, 99.5);
+      _histChTot       = bookHistogram1D("TotalChMult", 50, -1.0, 99.0);
+      _histHadrTot     = bookHistogram1D("HadrTotalMult", 100, -0.5, 99.5);
+      _histHadrChTot   = bookHistogram1D("HadrTotalChMult", 50, -1.0, 99.0);
+
+      double edges[11] = { 0.5, 0.6, 0.7, 0.80, 0.85, 0.9, 0.92, 0.94, 0.96, 0.98, 1.0 };
+      vector<double> vedges(edges, edges+11);
+      _histThrust      = bookHistogram1D("Thrust", vedges);
+      _histMajor       = bookHistogram1D("Major", 10, 0.0, 0.6);
+      _histSphericity  = bookHistogram1D("Sphericity", 10, 0.0, 0.8);
+      _histAplanarity  = bookHistogram1D("Aplanarity", 10, 0.0, 0.3);
+    }
 
 
-  // Constructor
-  ExampleAnalysis::ExampleAnalysis()
-    : Analysis("EXAMPLE")
-  {
-    const FinalState cnfs(-4, 4, 2*GeV);
-    const ChargedFinalState cfs(-4, 4, 2*GeV);
-    addProjection(cnfs, "FS");
-    addProjection(cfs, "CFS");
-    addProjection(FastJets(cnfs, FastJets::KT, 0.7), "Jets");
-    addProjection(Multiplicity(cfs), "CMult");
-    addProjection(Multiplicity(cnfs), "CNMult");
-    addProjection(Thrust(cfs), "Thrust");
-    addProjection(Sphericity(cfs), "Sphericity");
-  }
-
-
-  // Book histograms
-  void ExampleAnalysis::init() {
-    // Using histogram auto-booking is preferable if there are comparison datasets in HepData.
-    // Since this is just a demo analysis, there is no associated paper!
-
-    _histTot         = bookHistogram1D("TotalMult", 100, -0.5, 99.5);
-    _histChTot       = bookHistogram1D("TotalChMult", 50, -1.0, 99.0);
-    _histHadrTot     = bookHistogram1D("HadrTotalMult", 100, -0.5, 99.5);
-    _histHadrChTot   = bookHistogram1D("HadrTotalChMult", 50, -1.0, 99.0);
-
-    double edges[11] = { 0.5, 0.6, 0.7, 0.80, 0.85, 0.9, 0.92, 0.94, 0.96, 0.98, 1.0 };
-    vector<double> vedges(edges, edges+11);
-    _histThrust      = bookHistogram1D("Thrust", vedges);
-    _histMajor       = bookHistogram1D("Major", 10, 0.0, 0.6);
-    _histSphericity  = bookHistogram1D("Sphericity", 10, 0.0, 0.8);
-    _histAplanarity  = bookHistogram1D("Aplanarity", 10, 0.0, 0.3);
-  }
-
-
-  // Do the analysis
-  void ExampleAnalysis::analyze(const Event& event) {
-    // Analyse and print some info
-    const Multiplicity& cm = applyProjection<Multiplicity>(event, "CMult");
-    const Multiplicity& cnm = applyProjection<Multiplicity>(event, "CNMult");
-    getLog() << Log::DEBUG << "Total multiplicity = " << cnm.totalMultiplicity()  << endl;
-    getLog() << Log::DEBUG << "Total charged multiplicity = " << cm.totalMultiplicity()   << endl;
-    getLog() << Log::DEBUG << "Hadron multiplicity = " << cnm.hadronMultiplicity() << endl;
-    getLog() << Log::DEBUG << "Hadron charged multiplicity = " << cm.hadronMultiplicity()  << endl;
-
-    const Thrust& t = applyProjection<Thrust>(event, "Thrust");
-    getLog() << Log::DEBUG << "Thrust = " << t.thrust() << endl;
-
-    const Sphericity& s = applyProjection<Sphericity>(event, "Sphericity");
-    getLog() << Log::DEBUG << "Sphericity = " << s.sphericity() << endl;
-    getLog() << Log::DEBUG << "Aplanarity = " << s.aplanarity() << endl;
-
-    size_t num_b_jets = 0;
-    const Jets jets = applyProjection<FastJets>(event, "Jets").jets();
-    foreach (const Jet& j, jets) {
-      if (j.containsBottom()) ++num_b_jets;
+    /// Do the analysis
+    void analyze(const Event& event) {
+      // Analyse and print some info
+      const Multiplicity& cm = applyProjection<Multiplicity>(event, "CMult");
+      const Multiplicity& cnm = applyProjection<Multiplicity>(event, "CNMult");
+      getLog() << Log::DEBUG << "Total multiplicity = " << cnm.totalMultiplicity()  << endl;
+      getLog() << Log::DEBUG << "Total charged multiplicity = " << cm.totalMultiplicity()   << endl;
+      getLog() << Log::DEBUG << "Hadron multiplicity = " << cnm.hadronMultiplicity() << endl;
+      getLog() << Log::DEBUG << "Hadron charged multiplicity = " << cm.hadronMultiplicity()  << endl;
+      
+      const Thrust& t = applyProjection<Thrust>(event, "Thrust");
+      getLog() << Log::DEBUG << "Thrust = " << t.thrust() << endl;
+      
+      const Sphericity& s = applyProjection<Sphericity>(event, "Sphericity");
+      getLog() << Log::DEBUG << "Sphericity = " << s.sphericity() << endl;
+      getLog() << Log::DEBUG << "Aplanarity = " << s.aplanarity() << endl;
+      
+      size_t num_b_jets = 0;
+      const Jets jets = applyProjection<FastJets>(event, "Jets").jets();
+      foreach (const Jet& j, jets) {
+        if (j.containsBottom()) ++num_b_jets;
+      }
+      getLog() << Log::DEBUG << "#B-jets = " << num_b_jets << endl;
+      
+      // Fill histograms
+      const double weight = event.weight();
+      _histTot->fill(cnm.totalMultiplicity(), weight);
+      _histChTot->fill(cm.totalMultiplicity(), weight);
+      _histHadrTot->fill(cnm.hadronMultiplicity(), weight);
+      _histHadrChTot->fill(cm.hadronMultiplicity(), weight);
+      _histThrust->fill(t.thrust(), weight);
+      _histMajor->fill(t.thrustMajor(), weight);
+      _histSphericity->fill(s.sphericity(), weight);
+      _histAplanarity->fill(s.aplanarity(), weight);
     }
-    getLog() << Log::DEBUG << "#B-jets = " << num_b_jets << endl;
+    
+    
+    /// Finalize
+    void finalize() { 
+      normalize(_histTot);
+      normalize(_histChTot);
+      normalize(_histHadrTot);
+      normalize(_histHadrChTot);
+      normalize(_histThrust);
+      normalize(_histMajor);
+      normalize(_histSphericity);
+      normalize(_histAplanarity);
+    }
+    //@}
+
 
-    // Fill histograms
-    const double weight = event.weight();
-    _histTot->fill(cnm.totalMultiplicity(), weight);
-    _histChTot->fill(cm.totalMultiplicity(), weight);
-    _histHadrTot->fill(cnm.hadronMultiplicity(), weight);
-    _histHadrChTot->fill(cm.hadronMultiplicity(), weight);
-    _histThrust->fill(t.thrust(), weight);
-    _histMajor->fill(t.thrustMajor(), weight);
-    _histSphericity->fill(s.sphericity(), weight);
-    _histAplanarity->fill(s.aplanarity(), weight);
-  }
-
-
-  // Finalize
-  void ExampleAnalysis::finalize() { 
-    normalize(_histTot);
-    normalize(_histChTot);
-    normalize(_histHadrTot);
-    normalize(_histHadrChTot);
-    normalize(_histThrust);
-    normalize(_histMajor);
-    normalize(_histSphericity);
-    normalize(_histAplanarity);
-  }
+  private:
+    
+    //@{
+    /// Histograms
+    AIDA::IHistogram1D* _histTot;
+    AIDA::IHistogram1D* _histChTot;
+    AIDA::IHistogram1D* _histHadrTot;
+    AIDA::IHistogram1D* _histHadrChTot;
+    AIDA::IHistogram1D* _histThrust;
+    AIDA::IHistogram1D* _histMajor;
+    AIDA::IHistogram1D* _histSphericity;
+    AIDA::IHistogram1D* _histAplanarity;
+    //@}
 
+  };
+    
 
 
   // This global object acts as a hook for the plugin system

Modified: trunk/src/Analyses/ExampleTree.cc
==============================================================================
--- trunk/src/Analyses/ExampleTree.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/ExampleTree.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,212 +1,293 @@
 // -*- C++ -*-
+#include "Rivet/Analysis.hh"
 #include "Rivet/Tools/Logging.hh"
-#include "Rivet/Analyses/ExampleTree.hh"
 #include "Rivet/Projections/ChargedLeptons.hh"
 #include "Rivet/Projections/TotalVisibleMomentum.hh"
 #include "Rivet/Projections/FastJets.hh"
 
+// ROOT stuff
+#ifdef HAVE_ROOT
+#include "TTree.h"
+#include "TFile.h"
+#include "TString.h"
+#endif
+
 namespace Rivet {
 
 
-  #ifndef HAVE_ROOT
-  
+  /// @brief Book and fill a ROOT tree with simulated data.
+  ///
+  /// This does some things, e.g. access parton level information, which
+  /// are not recommended in Rivet analyses, since the information is 
+  /// unphysical and so cannot be compared to data, and also may be generator dependent.
+  /// 
+  class ExampleTree : public Analysis {
+  public:
+
+    #ifndef HAVE_ROOT
+    
+    ExampleTree() : Analysis("EXAMPLETREE") { }
+    void init() {
+      getLog() << Log::WARN << "Rivet was not compiled against ROOT. ExampleTree will do nothing." << endl;
+    }
+    void analyze(const Event& event) { }
+    void finalize() { }
 
-  ExampleTree::ExampleTree() 
-    : Analysis("EXAMPLETREE") { }
 
-  void ExampleTree::init() {
-    getLog() << Log::WARN << "Rivet was not compiled against ROOT. ExampleTree will do nothing." << endl;
-  }
-  void ExampleTree::analyze(const Event& event) { }
-  void ExampleTree::finalize() { }
-
-
-  #else
-
-
-  ExampleTree::ExampleTree() 
-    : Analysis("EXAMPLETREE")
-  {
-    const FinalState fs(-4.0, 4.0, 0.0*GeV);
-    addProjection(fs, "FS");
-    addProjection(ChargedLeptons(fs), "ChLeptons");
-    addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets");
-    
-    /// Veto neutrinos, antineutrinos and LSP
-    VetoedFinalState vfs(fs);
-    vfs
-      .addVetoDetail(NU_E, 10.0*GeV, 50.0*GeV)
-      .addVetoPairId(NU_MU)
-      .addVetoPairId(NU_TAU)
-      .addVetoId(1000022); // LSP
-    addProjection(vfs, "VFS");
-    addProjection(TotalVisibleMomentum(vfs), "TotalVisMom");
-
-    ZFinder zs(fs, ELECTRON, 80*GeV, 100*GeV, 0.2);
-    addProjection(zs, "Zs");
-  }
-
-
-  void ExampleTree::init() {
-    // Choose cuts
-    _jet_pt_cut = 20*GeV;
-    _subj_pt_cut = 20*GeV;
-    _lepton_pt_cut = 20*GeV;
-    _store_partons = true;
-
-    _treeFileName = "rivetTree.root";
-
-    // Create a file for the Tree
-    _treeFile = new TFile(_treeFileName, "recreate");
-
-    // Book the ntuple.
-    _rivetTree = new TTree("Rivet Tree", "Rivet Example Tree");
-
-    // Event number 
-    _rivetTree->Branch("nevt", &_nevt, "nevt/I");
-
-    // Vector bosons
-    _rivetTree->Branch("nvb", &_nvb, "nvb/I");
-    _rivetTree->Branch("vbtype", &_vbtype, "vbtype[nvb]/I");
-    _rivetTree->Branch("vbvec", &_vbvec, "vbvec[nvb][4]/F");
-
-    _rivetTree->Branch("njet", &_njet, "njet/I");
-    _rivetTree->Branch("vjet", &_vjet, "vjet[njet][4]/F");
-
-    _rivetTree->Branch("nsub", &_nsub, "nsub/I");
-    _rivetTree->Branch("sjet3", &_sjet3, "sjet3[nsub][4]/F");
-    _rivetTree->Branch("ysubsj", &_ysubsj, "ysubsj[nsub][4]/F");
-
-    _rivetTree->Branch("nlep", &_nlep, "nlep/I");
-    _rivetTree->Branch("vlep", &_vlep, "vlep[nlep][4]/F");
-    _rivetTree->Branch("leptype", &_leptype, "leptype[nlep][3]/F");
-
-    _rivetTree->Branch("npart", &_npart, "npart/I");
-    _rivetTree->Branch("ppart", &_ppart, "ppart[npart][4]/F");
-    _rivetTree->Branch("pid", &_pid, "pid[npart]/I");
-    _rivetTree->Branch("mo", &_mo, "mo[npart]/I");  // first mother.
-
-    _rivetTree->Branch("esumr", &_esumr, "esumr[4]/F");
-  }
-
-
-
-  // Do the analysis
-  void ExampleTree::analyze(const Event & event) {
-    const GenEvent& ev = event.genEvent();
-    _nevt = ev.event_number();
-
-    // Get the vector bosons
-    _nvb = 0;
-    const FinalState& zs = applyProjection<FinalState>(event, "Zs");
-    foreach (const Particle& p, zs.particles()) {
-      const FourMomentum p4 = p.momentum();
-      _vbvec[_nvb][0] = p4.E()/GeV;
-      _vbvec[_nvb][1] = p4.px()/GeV;
-      _vbvec[_nvb][2] = p4.py()/GeV;
-      _vbvec[_nvb][3] = p4.pz()/GeV;
-      _vbtype[_nvb]   = 1;
-      ++_nvb;
+    #else
+
+
+    ExampleTree() 
+      : Analysis("EXAMPLETREE")
+    {
+      const FinalState fs(-4.0, 4.0, 0.0*GeV);
+      addProjection(fs, "FS");
+      addProjection(ChargedLeptons(fs), "ChLeptons");
+      addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets");
+      
+      /// Veto neutrinos, antineutrinos and LSP
+      VetoedFinalState vfs(fs);
+      vfs
+        .addVetoDetail(NU_E, 10.0*GeV, 50.0*GeV)
+        .addVetoPairId(NU_MU)
+        .addVetoPairId(NU_TAU)
+        .addVetoId(1000022); // LSP
+      addProjection(vfs, "VFS");
+      addProjection(TotalVisibleMomentum(vfs), "TotalVisMom");
+      
+      ZFinder zs(fs, ELECTRON, 80*GeV, 100*GeV, 0.2);
+      addProjection(zs, "Zs");
     }
+    
+    
+    void init() {
+      // Choose cuts
+      _jet_pt_cut = 20*GeV;
+      _subj_pt_cut = 20*GeV;
+      _lepton_pt_cut = 20*GeV;
+      _store_partons = true;
+      
+      _treeFileName = "rivetTree.root";
+      
+      // Create a file for the Tree
+      _treeFile = new TFile(_treeFileName, "recreate");
+      
+      // Book the ntuple.
+      _rivetTree = new TTree("Rivet Tree", "Rivet Example Tree");
+      
+      // Event number 
+      _rivetTree->Branch("nevt", &_nevt, "nevt/I");
+      
+      // Vector bosons
+      _rivetTree->Branch("nvb", &_nvb, "nvb/I");
+      _rivetTree->Branch("vbtype", &_vbtype, "vbtype[nvb]/I");
+      _rivetTree->Branch("vbvec", &_vbvec, "vbvec[nvb][4]/F");
+      
+      _rivetTree->Branch("njet", &_njet, "njet/I");
+      _rivetTree->Branch("vjet", &_vjet, "vjet[njet][4]/F");
+      
+      _rivetTree->Branch("nsub", &_nsub, "nsub/I");
+      _rivetTree->Branch("sjet3", &_sjet3, "sjet3[nsub][4]/F");
+      _rivetTree->Branch("ysubsj", &_ysubsj, "ysubsj[nsub][4]/F");
+      
+      _rivetTree->Branch("nlep", &_nlep, "nlep/I");
+      _rivetTree->Branch("vlep", &_vlep, "vlep[nlep][4]/F");
+      _rivetTree->Branch("leptype", &_leptype, "leptype[nlep][3]/F");
+      
+      _rivetTree->Branch("npart", &_npart, "npart/I");
+      _rivetTree->Branch("ppart", &_ppart, "ppart[npart][4]/F");
+      _rivetTree->Branch("pid", &_pid, "pid[npart]/I");
+      _rivetTree->Branch("mo", &_mo, "mo[npart]/I");  // first mother.
+      
+      _rivetTree->Branch("esumr", &_esumr, "esumr[4]/F");
+    }
+    
 
-    // Get the partons. This is generator-dependent and should not be
-    // used in normal analyses.
-    _npart = 0;
-    if (_store_partons) {
-      for (GenEvent::particle_const_iterator pi = event.genEvent().particles_begin(); 
-           pi != event.genEvent().particles_end(); ++pi ) {
-        // Only include particles which are documentation line (status >1) 
-        // The result/meaning will be generator dependent.
-        if ( (*pi)->status() >= 2 ) {
-          const FourMomentum p4 = (*pi)->momentum();
-          _ppart[_npart][1] = p4.px();
-          _ppart[_npart][2] = p4.py();
-          _ppart[_npart][3] = p4.pz();
-          _ppart[_npart][0] = p4.E();
-          _pid[_npart] = (*pi)->pdg_id();
-          const GenVertex* vertex = (*pi)->production_vertex();
-          // Get the first mother
-          if (vertex) {
-            if (vertex->particles_in_size()>0) {
-              GenVertex::particles_in_const_iterator p1 = vertex->particles_in_const_begin();
-              _mo[_npart] = (*p1)->pdg_id();
+    // Do the analysis
+    void analyze(const Event& event) {
+      const GenEvent& ev = event.genEvent();
+      _nevt = ev.event_number();
+      
+      // Get the vector bosons
+      _nvb = 0;
+      const FinalState& zs = applyProjection<FinalState>(event, "Zs");
+      foreach (const Particle& p, zs.particles()) {
+        const FourMomentum p4 = p.momentum();
+        _vbvec[_nvb][0] = p4.E()/GeV;
+        _vbvec[_nvb][1] = p4.px()/GeV;
+        _vbvec[_nvb][2] = p4.py()/GeV;
+        _vbvec[_nvb][3] = p4.pz()/GeV;
+        _vbtype[_nvb]   = 1;
+        ++_nvb;
+      }
+      
+      // Get the partons. This is generator-dependent and should not be
+      // used in normal analyses.
+      _npart = 0;
+      if (_store_partons) {
+        for (GenEvent::particle_const_iterator pi = event.genEvent().particles_begin(); 
+             pi != event.genEvent().particles_end(); ++pi ) {
+          // Only include particles which are documentation line (status >1) 
+          // The result/meaning will be generator dependent.
+          if ( (*pi)->status() >= 2 ) {
+            const FourMomentum p4 = (*pi)->momentum();
+            _ppart[_npart][1] = p4.px();
+            _ppart[_npart][2] = p4.py();
+            _ppart[_npart][3] = p4.pz();
+            _ppart[_npart][0] = p4.E();
+            _pid[_npart] = (*pi)->pdg_id();
+            const GenVertex* vertex = (*pi)->production_vertex();
+            // Get the first mother
+            if (vertex) {
+              if (vertex->particles_in_size()>0) {
+                GenVertex::particles_in_const_iterator p1 = vertex->particles_in_const_begin();
+                _mo[_npart] = (*p1)->pdg_id();
+              } else {
+                _mo[_npart] = 0;
+              }
             } else {
               _mo[_npart] = 0;
             }
-          } else {
-            _mo[_npart] = 0;
+            getLog() << Log::DEBUG << _npart << ":" << _pid[_npart] << endl;
+            ++_npart;
           }
-          getLog() << Log::DEBUG << _npart << ":" << _pid[_npart] << endl;
-          ++_npart;
         }
       }
-    }
-    
-    
-    // Get the jets in decreasing pT order.
-    const FastJets& jets = applyProjection<FastJets>(event, "Jets");
-    PseudoJets jetList = jets.pseudoJetsByPt();
-    _njet = 0;
-    _nsub = 0;
-    foreach (const fastjet::PseudoJet& j, jetList) {
-      if (j.perp() > _jet_pt_cut) {
-        _vjet[_njet][0] = j.e()/GeV;
-        _vjet[_njet][1] = j.px()/GeV;
-        _vjet[_njet][2] = j.py()/GeV;
-        _vjet[_njet][3] = j.pz()/GeV;
-        if (j.perp() > _subj_pt_cut) {
-          _sjet3[_nsub][0] = j.e()/GeV;
-          _sjet3[_nsub][1] = j.px()/GeV;
-          _sjet3[_nsub][2] = j.py()/GeV;
-          _sjet3[_nsub][3] = j.pz()/GeV;
-          const vector<double> ys = jets.ySubJet(j);
-          for (size_t i = 0; i < 4; ++i){
-            if (ys.size() > i) {
-              _ysubsj[_nsub][i] = ys.at(i);
-            } else {
-              _ysubsj[_nsub][i] = 0;
+      
+      
+      // Get the jets in decreasing pT order.
+      const FastJets& jets = applyProjection<FastJets>(event, "Jets");
+      PseudoJets jetList = jets.pseudoJetsByPt();
+      _njet = 0;
+      _nsub = 0;
+      foreach (const fastjet::PseudoJet& j, jetList) {
+        if (j.perp() > _jet_pt_cut) {
+          _vjet[_njet][0] = j.e()/GeV;
+          _vjet[_njet][1] = j.px()/GeV;
+          _vjet[_njet][2] = j.py()/GeV;
+          _vjet[_njet][3] = j.pz()/GeV;
+          if (j.perp() > _subj_pt_cut) {
+            _sjet3[_nsub][0] = j.e()/GeV;
+            _sjet3[_nsub][1] = j.px()/GeV;
+            _sjet3[_nsub][2] = j.py()/GeV;
+            _sjet3[_nsub][3] = j.pz()/GeV;
+            const vector<double> ys = jets.ySubJet(j);
+            for (size_t i = 0; i < 4; ++i){
+              if (ys.size() > i) {
+                _ysubsj[_nsub][i] = ys.at(i);
+              } else {
+                _ysubsj[_nsub][i] = 0;
+              }
             }
+            ++_nsub;	 
           }
-          ++_nsub;	 
+          ++_njet;
         }
-        ++_njet;
       }
+      
+      // Loop over leptons
+      _nlep = 0;
+      const ChargedLeptons& cl = applyProjection<ChargedLeptons>(event, "ChLeptons");
+      foreach (const Particle& p, cl.chargedLeptons()) {
+        const FourMomentum p4 = p.momentum();
+        if (p4.pT() > _lepton_pt_cut) {
+          _vlep[_nlep][0] = p4.E()/GeV;
+          _vlep[_nlep][1] = p4.px()/GeV;
+          _vlep[_nlep][2] = p4.py()/GeV;
+          _vlep[_nlep][3] = p4.pz()/GeV;
+          ++_nlep;
+        }
+      }
+      
+      // Missing Et/total energy
+      const TotalVisibleMomentum& tvm = applyProjection<TotalVisibleMomentum>(event, "TotalVisMom");
+      _esumr[0] = tvm.momentum().E()/GeV;
+      _esumr[1] = tvm.momentum().px()/GeV;
+      _esumr[2] = tvm.momentum().py()/GeV;
+      _esumr[3] = tvm.momentum().pz()/GeV;
+      
+      // Finally fill the tree
+      _rivetTree->Fill();
     }
     
-    // Loop over leptons
-    _nlep = 0;
-    const ChargedLeptons& cl = applyProjection<ChargedLeptons>(event, "ChLeptons");
-    foreach (const Particle& p, cl.chargedLeptons()) {
-      const FourMomentum p4 = p.momentum();
-      if (p4.pT() > _lepton_pt_cut) {
-		_vlep[_nlep][0] = p4.E()/GeV;
-        _vlep[_nlep][1] = p4.px()/GeV;
-        _vlep[_nlep][2] = p4.py()/GeV;
-		_vlep[_nlep][3] = p4.pz()/GeV;
-        ++_nlep;
-      }
+    
+    // Finalize
+    void finalize() { 
+      // Write the tree to file.
+      _rivetTree->Write();
     }
     
-    // Missing Et/total energy
-    const TotalVisibleMomentum& tvm = applyProjection<TotalVisibleMomentum>(event, "TotalVisMom");
-	_esumr[0] = tvm.momentum().E()/GeV;
-    _esumr[1] = tvm.momentum().px()/GeV;
-    _esumr[2] = tvm.momentum().py()/GeV;
-	_esumr[3] = tvm.momentum().pz()/GeV;
-
-    // Finally fill the tree
-    _rivetTree->Fill();
-  }
-  
-  
-  // Finalize
-  void ExampleTree::finalize() { 
-    // Write the tree to file.
-    _rivetTree->Write();
-  }
+    //@}
+
+
+  private:
+
+    /// The tree
+    TTree* _rivetTree;
+    
+    /// The file for the Tree
+    TFile* _treeFile;
+
+    /// The filename
+    TString _treeFileName;
+
+
+    /// @name The ntuple variables.
+    //@{
+    /// Event number
+    int _nevt;            
+
+    /// Number of W bosons
+    int _nvb;             
+    /// 4 momentum of W bosons.
+    float _vbvec[8][4];
+    /// Type (i.e. decay mode) of W bosons.
+    int _vbtype[8]; 
+
+    /// Number of jets
+    int _njet; 
+    /// Four momentum of the jets
+    float _vjet[50][4]; 
+
+    /// Number of jets for which the subjet analysis was performed.
+    int _nsub; 
+    /// Four vector of jets for which we found subjets.
+    float _sjet3[200][4];
+    /// y 1->2, 2->3, 3->4, 4->5 for the above jets.
+    float _ysubsj[200][4];
+
+    /// Number of leptons
+    int _nlep;
+    /// Lepton types
+    int _leptype[150][3];
+    float _vlep[150][4];
+
+    /// Number of partons
+    int _npart; 
+    float _ppart[4000][4];
+    int _pid[4000];
+    int _mo[4000];
+
+    /// Total visible momentum
+    float _esumr[4];
+    //@}
+
+    /// Minimum pt of jets which will go into the tree.
+    int _jet_pt_cut;
+
+    /// Minimum pt of jets which will have y evaluated and stored.
+    int _subj_pt_cut;
+
+    /// Minimum pt of charged leptons which will go into the tree.
+    int _lepton_pt_cut;
+
+    /// Store the partons or not?
+    bool _store_partons;
+
+    #endif
+
+  };
 
-  
-  #endif
   
 
   // This global object acts as a hook for the plugin system

Modified: trunk/src/Analyses/SFM_1984_S1178091.cc
==============================================================================
--- trunk/src/Analyses/SFM_1984_S1178091.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/SFM_1984_S1178091.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,145 +1,151 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
 #include "Rivet/Tools/Logging.hh"
-#include "Rivet/Analyses/SFM_1984_S1178091.hh"
 #include "Rivet/Projections/Beam.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 
 namespace Rivet {
 
+  class SFM_1984_S1178091 : public Analysis {
+  public:
 
-  SFM_1984_S1178091::SFM_1984_S1178091() : Analysis("SFM_1984_S1178091") {
-    /// @todo Set approriate for your analysis
-    setBeams(PROTON, PROTON);
-    addProjection(Beam(), "Beam");
-    addProjection(ChargedFinalState(), "FS");
-    
-    /// @todo Set whether your finalize method needs the generator cross section
-    //setNeedsCrossSection(true);
-
-    /// @todo Initialise and register projections here
-  }
-
-
-  void SFM_1984_S1178091::init() {
-
-       _hist_multiplicity_inel_30 = bookHistogram1D(1, 1, 1); 
-       _hist_multiplicity_inel_45 = bookHistogram1D(1, 1, 2);
-       _hist_multiplicity_inel_53 = bookHistogram1D(1, 1, 3);
-       _hist_multiplicity_inel_63 = bookHistogram1D(1, 1, 4);
-       _hist_multiplicity_nsd_30 = bookHistogram1D(2, 1, 1);
-       _hist_multiplicity_nsd_45 = bookHistogram1D(2, 1, 2);
-       _hist_multiplicity_nsd_53 = bookHistogram1D(2, 1, 3);
-       _hist_multiplicity_nsd_63 = bookHistogram1D(2, 1, 4);
-
-  }
-
-
-  void SFM_1984_S1178091::analyze(const Event& event) {
-    Log log = getLog();
-    const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
-    const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
-    const size_t numParticles = fs.particles().size();
-    bool isDiffractive = false;
-
-    // Get the event weight
-    const double weight = event.weight();
-
-    // Decide whether event is of diffractive type or not 
-    // FIXME: it is not so clear in the paper how this distinction is made.
-    // They seem to require either exactly one particle with Feynman x larger
-    // than 0.8 to call an event diffractive or that there are no tracks
-    // reconstructed in either of the two hemispheres. For the latter
-    // they require in addition also the number of cahrged particles
-    // to be smaller than 8.
-
-    int n_left(0), n_right(0), n_large_x(0);
-    
-    foreach (const Particle& p, fs.particles()) {
-      // Calculate the particles Feynman x  
-      const double x_feyn = 2.0 * (p.momentum().pz()/GeV) / sqrtS;
-      if (fabs(x_feyn) > 0.8 ) n_large_x += 1;
-
-      // Pseudorapidity
-      const double eta = p.momentum().pseudorapidity();
-      if (eta > 0.0) n_right += 1;
-      else if (eta < 0.0) n_left += 1;
+    /// Constructor
+    SFM_1984_S1178091() : Analysis("SFM_1984_S1178091") {
+      setBeams(PROTON, PROTON);
+      addProjection(Beam(), "Beam");
+      addProjection(ChargedFinalState(), "FS");
     }
-    
-    // Not sure about the "=="
-    if (n_large_x == 1) isDiffractive = true;
-    
-    // FIXME: Not sure about the "== 1", the paper says no charged particle
-    // that was reconstructed so the incoming protons must run down the beam
-    // pipe. Since we look a the complete final state here no particle being
-    // reconstructed should be equal to one particle (proton) in each
-    // hemisphere.  The "< 8" is also not certain.
-    if ((n_left == 1 || n_right == 1) && numParticles < 8 ) {
-        isDiffractive = true;
-    }
-    
-    getLog() << Log::DEBUG << "N_left: " << n_left << ", N_right: " << n_right << ", N_large_x: " << n_large_x << endl;
 
 
+    /// @name Analysis methods
+    //@{
 
-    // Fill histos of charged multiplicity distributions
-    // The inelastic samples are said to contain also diffractive events.
-    //
-    if (fuzzyEquals(sqrtS, 30.4/GeV, 1E-1)) {
-      if (isDiffractive==true) {
-        _hist_multiplicity_nsd_30 ->fill(numParticles, weight);
-        _hist_multiplicity_inel_30->fill(numParticles, weight);
+    void init() {
+      _hist_multiplicity_inel_30 = bookHistogram1D(1, 1, 1); 
+      _hist_multiplicity_inel_45 = bookHistogram1D(1, 1, 2);
+      _hist_multiplicity_inel_53 = bookHistogram1D(1, 1, 3);
+      _hist_multiplicity_inel_63 = bookHistogram1D(1, 1, 4);
+      _hist_multiplicity_nsd_30 = bookHistogram1D(2, 1, 1);
+      _hist_multiplicity_nsd_45 = bookHistogram1D(2, 1, 2);
+      _hist_multiplicity_nsd_53 = bookHistogram1D(2, 1, 3);
+      _hist_multiplicity_nsd_63 = bookHistogram1D(2, 1, 4);  
+    }
+    
+    
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
+      const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Decide whether event is of diffractive type or not 
+      // FIXME: it is not so clear in the paper how this distinction is made.
+      // They seem to require either exactly one particle with Feynman x larger
+      // than 0.8 to call an event diffractive or that there are no tracks
+      // reconstructed in either of the two hemispheres. For the latter
+      // they require in addition also the number of charged particles
+      // to be smaller than 8.
+      
+      int n_left(0), n_right(0), n_large_x(0);
+      foreach (const Particle& p, fs.particles()) {
+        // Calculate the particles' Feynman x  
+        const double x_feyn = 2.0 * (p.momentum().pz()/GeV) / sqrtS;
+        if (fabs(x_feyn) > 0.8 ) n_large_x += 1;
+        
+        // Pseudorapidity
+        const double eta = p.momentum().pseudorapidity();
+        if (eta > 0.0) n_right += 1;
+        else if (eta < 0.0) n_left += 1;
       }
-      else {
-        _hist_multiplicity_inel_30->fill(numParticles, weight);
-      }  
-    } 
-    else if (fuzzyEquals(sqrtS, 44/GeV, 1E-1)) {
-      if (isDiffractive==true) {
-        _hist_multiplicity_nsd_45 ->fill(numParticles, weight);
-        _hist_multiplicity_inel_45->fill(numParticles, weight);
+      
+      // Not sure about the "=="
+      /// @todo Numerical precision problem!
+      bool isDiffractive = false;
+      if (n_large_x == 1) isDiffractive = true;
+      
+      // FIXME: Not sure about the "== 1", the paper says no charged particle
+      // that was reconstructed so the incoming protons must run down the beam
+      // pipe. Since we look at the complete final state here no particle being
+      // reconstructed should be equal to one particle (proton) in each
+      // hemisphere.  The "< 8" is also not certain.
+      if ((n_left == 1 || n_right == 1) && numParticles < 8 ) {
+        isDiffractive = true;
       }
-      else {
-        _hist_multiplicity_inel_45->fill(numParticles, weight);
-      }  
-    }
-    else if (fuzzyEquals(sqrtS, 53/GeV, 1E-1)) {
-      if (isDiffractive==true) {
-        _hist_multiplicity_nsd_53 ->fill(numParticles, weight);
-        _hist_multiplicity_inel_53->fill(numParticles, weight);
+      
+      getLog() << Log::DEBUG << "N_left: " << n_left << ", N_right: " 
+               << n_right << ", N_large_x: " << n_large_x << endl;
+      
+      
+      // Fill histos of charged multiplicity distributions
+      // The inelastic samples are said to contain also diffractive events.
+      //
+      if (fuzzyEquals(sqrtS, 30.4/GeV, 1E-1)) {
+        if (isDiffractive) {
+          _hist_multiplicity_nsd_30 ->fill(numParticles, weight);
+          _hist_multiplicity_inel_30->fill(numParticles, weight);
+        } else {
+          _hist_multiplicity_inel_30->fill(numParticles, weight);
+        }  
+      } 
+      else if (fuzzyEquals(sqrtS, 44/GeV, 1E-1)) {
+        if (isDiffractive) {
+          _hist_multiplicity_nsd_45 ->fill(numParticles, weight);
+          _hist_multiplicity_inel_45->fill(numParticles, weight);
+        } else {
+          _hist_multiplicity_inel_45->fill(numParticles, weight);
+        }  
       }
-      else {
-        _hist_multiplicity_inel_53->fill(numParticles, weight);
-      }  
-    }
-    else if (fuzzyEquals(sqrtS, 63/GeV, 1E-1)) {
-      if (isDiffractive==true) {
-        _hist_multiplicity_nsd_63 ->fill(numParticles, weight);
-        _hist_multiplicity_inel_63->fill(numParticles, weight);
+      else if (fuzzyEquals(sqrtS, 53/GeV, 1E-1)) {
+        if (isDiffractive) {
+          _hist_multiplicity_nsd_53 ->fill(numParticles, weight);
+          _hist_multiplicity_inel_53->fill(numParticles, weight);
+        } else {
+          _hist_multiplicity_inel_53->fill(numParticles, weight);
+        }  
+      }
+      else if (fuzzyEquals(sqrtS, 63/GeV, 1E-1)) {
+        if (isDiffractive) {
+          _hist_multiplicity_nsd_63 ->fill(numParticles, weight);
+          _hist_multiplicity_inel_63->fill(numParticles, weight);
+        }
+        else {
+          _hist_multiplicity_inel_63->fill(numParticles, weight);
+        }  
       }
-      else {
-        _hist_multiplicity_inel_63->fill(numParticles, weight);
-      }  
+      
     }
     
+    
+    void finalize() {
+      normalize(_hist_multiplicity_inel_30);
+      normalize(_hist_multiplicity_inel_45);
+      normalize(_hist_multiplicity_inel_53);
+      normalize(_hist_multiplicity_inel_63);
+      normalize(_hist_multiplicity_nsd_30 );
+      normalize(_hist_multiplicity_nsd_45 );
+      normalize(_hist_multiplicity_nsd_53 );
+      normalize(_hist_multiplicity_nsd_63 );
+    }
+    //@}
+    
 
+  private:
+    
+    /// @name Histograms
+    //@{
 
-  }
-
-
-  void SFM_1984_S1178091::finalize() {
-
-       normalize(_hist_multiplicity_inel_30);
-       normalize(_hist_multiplicity_inel_45);
-       normalize(_hist_multiplicity_inel_53);
-       normalize(_hist_multiplicity_inel_63);
-       normalize(_hist_multiplicity_nsd_30 );
-       normalize(_hist_multiplicity_nsd_45 );
-       normalize(_hist_multiplicity_nsd_53 );
-       normalize(_hist_multiplicity_nsd_63 );
+    AIDA::IHistogram1D *_hist_multiplicity_inel_30;
+    AIDA::IHistogram1D *_hist_multiplicity_inel_45;
+    AIDA::IHistogram1D *_hist_multiplicity_inel_53;
+    AIDA::IHistogram1D *_hist_multiplicity_inel_63;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_30;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_45;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_53;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_63;
+    //@}
 
-  }
+  };
 
 
 

Modified: trunk/src/Analyses/STAR_2006_S6870392.cc
==============================================================================
--- trunk/src/Analyses/STAR_2006_S6870392.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/STAR_2006_S6870392.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,5 +1,5 @@
 // -*- C++ -*-
-#include "Rivet/Analyses/STAR_2006_S6870392.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/Tools/Logging.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/FastJets.hh"
@@ -7,68 +7,81 @@
 
 namespace Rivet {
 
-
-  STAR_2006_S6870392::STAR_2006_S6870392()
-    : Analysis("STAR_2006_S6870392")
-  {
-    setBeams(PROTON, PROTON);
-
-    /// @todo Use cross-section from generator
-    //setNeedsCrossSection(true);
-
-    //full final state
-    FinalState fs(-2.0, 2.0);
-    addProjection(fs, "FS");
-    // R=0.4, pTmin=0, seed_threshold=0.5:
-    addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.4, 0.0, 0.5), "MidpointJets");
-  } 
-
-
-  // Book histograms
-  void STAR_2006_S6870392::init() {
-    _h_jet_pT_MB = bookHistogram1D(1, 1, 1);
-    _h_jet_pT_HT = bookHistogram1D(2, 1, 1);
-  }
-
-
-
-  // Do the analysis 
-  void STAR_2006_S6870392::analyze(const Event & event) {
-    double weight = event.weight();
-
-    // Skip if the event is empty
-    const FinalState& fs = applyProjection<FinalState>(event, "FS");
-    if (fs.isEmpty()) {
-      getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
-               << " because no final state found " << endl;
-      vetoEvent;
+  /// @brief inclusive jet cross-section in pp at 200 GeV
+  class STAR_2006_S6870392 : public Analysis {
+  public:
+    
+    /// Constructor
+    STAR_2006_S6870392()
+      : Analysis("STAR_2006_S6870392")
+    {
+      setBeams(PROTON, PROTON);
+      FinalState fs(-2.0, 2.0);
+      addProjection(fs, "FS");
+      // R=0.4, pTmin=0, seed_threshold=0.5:
+      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.4, 0.0, 0.5), "MidpointJets");
+    } 
+
+
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book histograms
+    void init() {
+      _h_jet_pT_MB = bookHistogram1D(1, 1, 1);
+      _h_jet_pT_HT = bookHistogram1D(2, 1, 1);
     }
 
-    // Find jets
-    const FastJets& jetpro = applyProjection<FastJets>(event, "MidpointJets");
-    const PseudoJets& jets = jetpro.pseudoJetsByPt();
-
-    foreach (fastjet::PseudoJet jet, jets) {
-      if (fabs(jets[0].eta()) < 0.8 && fabs(jets[0].eta()) > 0.2) {
-        _h_jet_pT_MB->fill(jet.perp(), weight);
-        _h_jet_pT_HT->fill(jet.perp(), weight);
+    /// Do the analysis 
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.isEmpty()) {
+        getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                 << " because no final state found " << endl;
+        vetoEvent;
+      }
+      
+      // Find jets
+      const FastJets& jetpro = applyProjection<FastJets>(event, "MidpointJets");
+      const PseudoJets& jets = jetpro.pseudoJetsByPt();
+      
+      foreach (fastjet::PseudoJet jet, jets) {
+        if (fabs(jets[0].eta()) < 0.8 && fabs(jets[0].eta()) > 0.2) {
+          _h_jet_pT_MB->fill(jet.perp(), weight);
+          _h_jet_pT_HT->fill(jet.perp(), weight);
+        }
       }
     }
-  }
-
-
-
-  // Finalize
-  void STAR_2006_S6870392::finalize() {
-    /// @todo Use the generator cross-section
-    //_h_total_cross_section->fill(crossSection());
-    normalize(_h_jet_pT_MB, 16603100);
-    normalize(_h_jet_pT_HT, 1808234);
-  }
-
-
-
+    
+    
+    
+    /// Finalize
+    void finalize() {
+      /// @todo Use the generator cross-section
+      //_h_total_cross_section->fill(crossSection());
+      normalize(_h_jet_pT_MB, 16603100);
+      normalize(_h_jet_pT_HT, 1808234);
+    }
+    
+    //@}
+    
+    
+  private:
+    
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_jet_pT_MB;
+    AIDA::IHistogram1D * _h_jet_pT_HT;
+    //@}
+    
+  };
+  
+  
+  
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<STAR_2006_S6870392> plugin_STAR_2006_S6870392;
-
+  
 }

Modified: trunk/src/Analyses/STAR_2008_S7993412.cc
==============================================================================
--- trunk/src/Analyses/STAR_2008_S7993412.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/STAR_2008_S7993412.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,75 +1,90 @@
 // -*- C++ -*-
-#include "Rivet/Analyses/STAR_2008_S7993412.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/Tools/Logging.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/RivetAIDA.hh"
 
 namespace Rivet {
 
-
-  STAR_2008_S7993412::STAR_2008_S7993412()
-    : Analysis("STAR_2008_S7993412")
-  {
-    setBeams(PROTON, PROTON);
-
-    //full final state
-    ChargedFinalState fs(-1.0, 1.0, 1.0*GeV);
-    addProjection(fs, "FS");
-  } 
-
-
-  // Book histograms
-  void STAR_2008_S7993412::init() {
-    _h_Y_jet_trigger = bookProfile1D(1, 1, 1);
-    _h_Y_jet_associated = bookProfile1D(2, 1, 1);
-  }
-
-
-
-  // Do the analysis 
-  void STAR_2008_S7993412::analyze(const Event & event) {
-    double weight = event.weight();
-
-    // Skip if the event is empty
-    const FinalState& fs = applyProjection<FinalState>(event, "FS");
-    if (fs.isEmpty()) {
-      getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
-               << " because no final state found " << endl;
-      vetoEvent;
+  /// @brief di-hadron correlations in d-Au at 200 GeV
+  class STAR_2008_S7993412 : public Analysis {
+  public:
+
+    STAR_2008_S7993412()
+      : Analysis("STAR_2008_S7993412")
+    {
+      setBeams(PROTON, PROTON);
+      ChargedFinalState fs(-1.0, 1.0, 1.0*GeV);
+      addProjection(fs, "FS");
+    } 
+    
+    
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book histograms
+    void init() {
+      _h_Y_jet_trigger = bookProfile1D(1, 1, 1);
+      _h_Y_jet_associated = bookProfile1D(2, 1, 1);
     }
 
-    foreach (const Particle& tp, fs.particles()) {
-      const double triggerpT = tp.momentum().pT();
-      if (triggerpT >= 2.0 && triggerpT < 5.0) {
-        int N_associated = 0;
-        foreach (const Particle& ap, fs.particles()) {
-          if (ap.momentum().pT() > 1.5 &&
-              ap.momentum().pT() < triggerpT &&
-              deltaPhi(tp.momentum().phi(), ap.momentum().phi()) < 1 &&
-              fabs(tp.momentum().pseudorapidity() - ap.momentum().pseudorapidity()) < 1.75) {
-            N_associated += 1;
+
+    /// Do the analysis 
+    void analyze(const Event & event) {
+      const double weight = event.weight();
+
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.isEmpty()) {
+        getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                 << " because no final state found " << endl;
+        vetoEvent;
+      }
+      
+      foreach (const Particle& tp, fs.particles()) {
+        const double triggerpT = tp.momentum().pT();
+        if (triggerpT >= 2.0 && triggerpT < 5.0) {
+          int N_associated = 0;
+          foreach (const Particle& ap, fs.particles()) {
+            if (ap.momentum().pT() > 1.5 &&
+                ap.momentum().pT() < triggerpT &&
+                deltaPhi(tp.momentum().phi(), ap.momentum().phi()) < 1 &&
+                fabs(tp.momentum().pseudorapidity() - ap.momentum().pseudorapidity()) < 1.75) {
+              N_associated += 1;
+            }
           }
+          //const double dPhidEta = 2 * 2*1.75;
+          //_h_Y_jet_trigger->fill(triggerpT, N_associated/dPhidEta, weight);
+          _h_Y_jet_trigger->fill(triggerpT, N_associated, weight);
         }
-        //const double dPhidEta = 2 * 2*1.75;
-        //_h_Y_jet_trigger->fill(triggerpT, N_associated/dPhidEta, weight);
-        _h_Y_jet_trigger->fill(triggerpT, N_associated, weight);
       }
     }
-  }
-
+    
+    
+    /// Finalize
+    void finalize() {
+      /// @todo Use the generator cross-section
+      //_h_total_cross_section->fill(crossSection());
+      //normalize(_h_jet_pT_MB, 16603100);
+      //normalize(_h_jet_pT_HT, 1808234);
+    }
+    
+    //@}
 
 
-  // Finalize
-  void STAR_2008_S7993412::finalize() {
-    /// @todo Use the generator cross-section
-    //_h_total_cross_section->fill(crossSection());
-    //normalize(_h_jet_pT_MB, 16603100);
-    //normalize(_h_jet_pT_HT, 1808234);
-  }
+  private:
 
+    /// @name Histograms
+    //@{
+    AIDA::IProfile1D * _h_Y_jet_trigger;
+    AIDA::IProfile1D * _h_Y_jet_associated;
+    //@}
 
+  };
 
+    
+    
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<STAR_2008_S7993412> plugin_STAR_2008_S7993412;
-
+  
 }

Modified: trunk/src/Analyses/STAR_2009_UE_HELEN.cc
==============================================================================
--- trunk/src/Analyses/STAR_2009_UE_HELEN.cc	Mon Aug 31 11:19:45 2009	(r1787)
+++ trunk/src/Analyses/STAR_2009_UE_HELEN.cc	Mon Aug 31 11:42:10 2009	(r1788)
@@ -1,8 +1,7 @@
 // -*- C++ -*-
-#include "Rivet/Rivet.hh"
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
 #include "Rivet/Tools/Logging.hh"
-#include "Rivet/Analyses/STAR_2009_UE_HELEN.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Projections/FastJets.hh"
@@ -10,185 +9,269 @@
 namespace Rivet {
 
 
-  STAR_2009_UE_HELEN::STAR_2009_UE_HELEN()
-    : Analysis("STAR_2009_UE_HELEN")  
-  { 
-    setBeams(PROTON, ANTIPROTON);
-    
-    // Final state for the jet finding
-    const FinalState fsj(-4.0, 4.0, 0.0*GeV);
-    addProjection(fsj, "FSJ");
-    /// @todo Does STAR really use a CDF midpoint?!?
-    addProjection(FastJets(fsj, FastJets::CDFMIDPOINT, 0.7), "Jets");
-    
-    // Final state for the sum(ET) distributions
-    const FinalState fs(-1.0, 1.0, 0.0*GeV);
-    addProjection(fs, "FS");
-    
-    // Charged final state for the distributions
-    const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
-    addProjection(cfs, "CFS");
-  }
-
-
-  // Book histograms
-  void STAR_2009_UE_HELEN::init() {
-    _hist_pnchg      = bookProfile1D( 1, 1, 1);
-    _hist_pmaxnchg   = bookProfile1D( 2, 1, 1);
-    _hist_pminnchg   = bookProfile1D( 3, 1, 1);
-    _hist_pdifnchg   = bookProfile1D( 4, 1, 1);
-    _hist_pcptsum    = bookProfile1D( 5, 1, 1);
-    _hist_pmaxcptsum = bookProfile1D( 6, 1, 1);
-    _hist_pmincptsum = bookProfile1D( 7, 1, 1);
-    _hist_pdifcptsum = bookProfile1D( 8, 1, 1);
-    _hist_pcptave    = bookProfile1D( 9, 1, 1);
-    //_hist_onchg   = bookProfile1D( 1, 1, 1, "Overall number of charged particles");
-    //_hist_ocptsum = bookProfile1D( 2, 1, 1, "Overall charged $p_\\perp$ sum");
-    //_hist_oetsum  = bookProfile1D( 3, 1, 1, "Overall $E_\\perp$ sum");
-  }
-
-
-  // Do the analysis
-  void STAR_2009_UE_HELEN::analyze(const Event& e) {
-
-    const FinalState& fsj = applyProjection<FinalState>(e, "FSJ");
-    if (fsj.particles().size() < 1) {
-      getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
-      vetoEvent;
+  /* STAR underlying event
+   * @author Hendrik Hoeth
+   */ 
+  class STAR_2009_UE_HELEN : public Analysis {
+  public:
+
+    /// Constructor
+    STAR_2009_UE_HELEN()
+      : Analysis("STAR_2009_UE_HELEN")  
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      // Final state for the jet finding
+      const FinalState fsj(-4.0, 4.0, 0.0*GeV);
+      addProjection(fsj, "FSJ");
+      /// @todo STAR doesn't really use a CDF midpoint algorithm!
+      addProjection(FastJets(fsj, FastJets::CDFMIDPOINT, 0.7), "Jets");
+      
+      // Final state for the sum(ET) distributions
+      const FinalState fs(-1.0, 1.0, 0.0*GeV);
+      addProjection(fs, "FS");
+      
+      // Charged final state for the distributions
+      const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
+      addProjection(cfs, "CFS");
     }
 
-    const Jets jets = applyProjection<FastJets>(e, "Jets").jetsByPt();
-    getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
 
-    // We require the leading jet to be within |eta|<2
-    if (jets.size() < 1 || fabs(jets[0].momentum().eta()) >= 2) {
-      getLog() << Log::DEBUG << "Failed jet cut" << endl;
-      vetoEvent;
-    }
-    
-    const double jetphi = jets[0].momentum().phi();
-    const double jetpT  = jets[0].momentum().pT();
+    // /// @name Publication metadata
+    // //@{
 
-    // Get the event weight
-    const double weight = e.weight();
+    // /// Analysis name
+    // string name() const {
+    //   return "STAR_2009_UE_HELEN";
+    // }
+    // /// SPIRES key (IRN)
+    // string spiresId() const {
+    //   return "NONE";
+    // }
+    // /// A short description of the analysis.
+    // string summary() const {
+    //   return "STAR underlying event in leading jet events";
+    // }
+    // /// Full description of the analysis, for the manual
+    // string description() const {
+    //   ostringstream os;
+    //   os << "";
+    //   return os.str();
+    // }
+    // /// Experiment which performed and published this analysis.
+    // string experiment() const {
+    //  return "STAR";
+    // }
+    // /// Collider on which the experiment was based
+    // string collider() const {
+    //  return "(RHIC pp 200 GeV)";
+    // }
+    // /// When published according to SPIRES
+    // string year() const {
+    //  return "2008";
+    // }
+    // /// Names & emails of paper/analysis authors.
+    // vector<string> authors() const {
+    //   vector<string> ret;
+    //   ret += "Hendrik Hoeth <hendrik.hoeth at cern.ch>";
+    //   return ret;
+    // }
+    // /// Information about the events needed as input for this analysis.
+    // string runInfo() const {
+    //   ostringstream os;
+    //   os << "* pp interactions at 200 GeV";
+    //   return os.str();
+    // }
+
+    // string status() const {
+    //   return "UNVALIDATED";
+    // }
+    // /// No journal or preprint references.
+    // vector<string> references() const {
+    //   vector<string> ret;
+    //   ret += "";
+    //   return ret;
+    // }
 
-    // Get the final states to work with for filling the distributions
-    const FinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
+    // //@}
+    
 
-    size_t   numOverall(0),     numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0)  ;
-    double ptSumOverall(0.0), ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
-    //double EtSumOverall(0.0), EtSumToward(0.0), EtSumTrans1(0.0), EtSumTrans2(0.0), EtSumAway(0.0);
-    double ptMaxOverall(0.0), ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
-
-    // Calculate all the charged stuff
-    foreach (const Particle& p, cfs.particles()) {
-      const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
-      const double pT = p.momentum().pT();
-      const double phi = p.momentum().phi();
-      /// @todo Jet and particle phi should have same ranges this way: check
-      const double rotatedphi = phi - jetphi;
-
-      ptSumOverall += pT;
-      ++numOverall;
-      if (pT > ptMaxOverall)
-        ptMaxOverall = pT;
-
-      if (dPhi < PI/3.0) {
-        ptSumToward += pT;
-        ++numToward;
-        if (pT > ptMaxToward)
-          ptMaxToward = pT;
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      _hist_pnchg      = bookProfile1D( 1, 1, 1);
+      _hist_pmaxnchg   = bookProfile1D( 2, 1, 1);
+      _hist_pminnchg   = bookProfile1D( 3, 1, 1);
+      _hist_pdifnchg   = bookProfile1D( 4, 1, 1);
+      _hist_pcptsum    = bookProfile1D( 5, 1, 1);
+      _hist_pmaxcptsum = bookProfile1D( 6, 1, 1);
+      _hist_pmincptsum = bookProfile1D( 7, 1, 1);
+      _hist_pdifcptsum = bookProfile1D( 8, 1, 1);
+      _hist_pcptave    = bookProfile1D( 9, 1, 1);
+      //_hist_onchg   = bookProfile1D( 1, 1, 1, "Overall number of charged particles");
+      //_hist_ocptsum = bookProfile1D( 2, 1, 1, "Overall charged $p_\\perp$ sum");
+      //_hist_oetsum  = bookProfile1D( 3, 1, 1, "Overall $E_\\perp$ sum");
+    }
+    
+    
+    /// Do the analysis
+    void analyze(const Event& e) {
+      const FinalState& fsj = applyProjection<FinalState>(e, "FSJ");
+      if (fsj.particles().size() < 1) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
       }
-      else if (dPhi < 2*PI/3.0) {
-        if (rotatedphi <= PI) {
-          ptSumTrans1 += pT;
-          ++numTrans1;
-          if (pT > ptMaxTrans1)
-            ptMaxTrans1 = pT;
+      
+      const Jets jets = applyProjection<FastJets>(e, "Jets").jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
+      
+      // We require the leading jet to be within |eta|<2
+      if (jets.size() < 1 || fabs(jets[0].momentum().eta()) >= 2) {
+        getLog() << Log::DEBUG << "Failed jet cut" << endl;
+        vetoEvent;
+      }
+      
+      const double jetphi = jets[0].momentum().phi();
+      const double jetpT  = jets[0].momentum().pT();
+      
+      // Get the event weight
+      const double weight = e.weight();
+      
+      // Get the final states to work with for filling the distributions
+      const FinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
+      
+      size_t   numOverall(0),     numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0)  ;
+      double ptSumOverall(0.0), ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
+      //double EtSumOverall(0.0), EtSumToward(0.0), EtSumTrans1(0.0), EtSumTrans2(0.0), EtSumAway(0.0);
+      double ptMaxOverall(0.0), ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
+      
+      // Calculate all the charged stuff
+      foreach (const Particle& p, cfs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double pT = p.momentum().pT();
+        const double phi = p.momentum().phi();
+        /// @todo Jet and particle phi should have same ranges this way: check
+        const double rotatedphi = phi - jetphi;
+        
+        ptSumOverall += pT;
+        ++numOverall;
+        if (pT > ptMaxOverall)
+          ptMaxOverall = pT;
+        
+        if (dPhi < PI/3.0) {
+          ptSumToward += pT;
+          ++numToward;
+          if (pT > ptMaxToward)
+            ptMaxToward = pT;
+        }
+        else if (dPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            ptSumTrans1 += pT;
+            ++numTrans1;
+            if (pT > ptMaxTrans1)
+              ptMaxTrans1 = pT;
+          }
+          else {
+            ptSumTrans2 += pT;
+            ++numTrans2;
+            if (pT > ptMaxTrans2)
+              ptMaxTrans2 = pT;
+          }
         }
         else {
-          ptSumTrans2 += pT;
-          ++numTrans2;
-          if (pT > ptMaxTrans2)
-            ptMaxTrans2 = pT;
+          ptSumAway += pT;
+          ++numAway;
+          if (pT > ptMaxAway)
+            ptMaxAway = pT;
         }
-      }
-      else {
-        ptSumAway += pT;
-        ++numAway;
-        if (pT > ptMaxAway)
-          ptMaxAway = pT;
-      }
-    } // end charged particle loop
-
-
-    #if 0   
-    /// @todo Enable this part when we have the numbers from Rick Field
-
-    // And now the same business for all particles (including neutrals)
-    foreach (const Particle& p, fs.particles()) {
-      const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
-      const double ET = p.momentum().Et();
-      const double phi = p.momentum().phi();
-      const double rotatedphi = phi - jetphi;
-
-      EtSumOverall += ET;
-
-      if (dPhi < PI/3.0) {
-        EtSumToward += ET;
-      }
-      else if (dPhi < 2*PI/3.0) {
-        if (rotatedphi <= PI) {
-          EtSumTrans1 += ET;
+      } // end charged particle loop
+      
+      
+      #if 0   
+      /// @todo Enable this part when we have the numbers from Rick Field
+      
+      // And now the same business for all particles (including neutrals)
+      foreach (const Particle& p, fs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double ET = p.momentum().Et();
+        const double phi = p.momentum().phi();
+        const double rotatedphi = phi - jetphi;
+        
+        EtSumOverall += ET;
+        
+        if (dPhi < PI/3.0) {
+          EtSumToward += ET;
+        }
+        else if (dPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            EtSumTrans1 += ET;
+          }
+          else {
+            EtSumTrans2 += ET;
+          }
         }
         else {
-          EtSumTrans2 += ET;
+          EtSumAway += ET;
         }
+      } // end all particle loop
+      #endif
+      
+      
+      // Fill the histograms
+      //_hist_tnchg->fill(jetpT, numToward/(4*PI/3), weight);
+      _hist_pnchg->fill(jetpT, (numTrans1+numTrans2)/(4*PI/3), weight);
+      _hist_pmaxnchg->fill(jetpT, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pminnchg->fill(jetpT, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pdifnchg->fill(jetpT, abs(numTrans1-numTrans2)/(2*PI/3), weight);
+      //_hist_anchg->fill(jetpT, numAway/(4*PI/3), weight);
+      
+      //_hist_tcptsum->fill(jetpT, ptSumToward/(4*PI/3), weight);
+      _hist_pcptsum->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
+      _hist_pmaxcptsum->fill(jetpT, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pmincptsum->fill(jetpT, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pdifcptsum->fill(jetpT, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
+      //_hist_acptsum->fill(jetpT, ptSumAway/(4*PI/3), weight);
+      
+      //if (numToward > 0) {
+      //  _hist_tcptave->fill(jetpT, ptSumToward/numToward, weight);
+      //  _hist_tcptmax->fill(jetpT, ptMaxToward, weight);
+      //}
+      if ((numTrans1+numTrans2) > 0) {
+        _hist_pcptave->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
+        //_hist_pcptmax->fill(jetpT, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
       }
-      else {
-        EtSumAway += ET;
-      }
-    } // end all particle loop
-    #endif
-
-
-    // Fill the histograms
-    //_hist_tnchg->fill(jetpT, numToward/(4*PI/3), weight);
-    _hist_pnchg->fill(jetpT, (numTrans1+numTrans2)/(4*PI/3), weight);
-    _hist_pmaxnchg->fill(jetpT, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
-    _hist_pminnchg->fill(jetpT, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
-    _hist_pdifnchg->fill(jetpT, abs(numTrans1-numTrans2)/(2*PI/3), weight);
-    //_hist_anchg->fill(jetpT, numAway/(4*PI/3), weight);
-
-    //_hist_tcptsum->fill(jetpT, ptSumToward/(4*PI/3), weight);
-    _hist_pcptsum->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
-    _hist_pmaxcptsum->fill(jetpT, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
-    _hist_pmincptsum->fill(jetpT, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
-    _hist_pdifcptsum->fill(jetpT, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
-    //_hist_acptsum->fill(jetpT, ptSumAway/(4*PI/3), weight);
-
-    //if (numToward > 0) {
-    //  _hist_tcptave->fill(jetpT, ptSumToward/numToward, weight);
-    //  _hist_tcptmax->fill(jetpT, ptMaxToward, weight);
-    //}
-    if ((numTrans1+numTrans2) > 0) {
-      _hist_pcptave->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
-      //_hist_pcptmax->fill(jetpT, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
+      //if (numAway > 0) {
+      //  _hist_acptave->fill(jetpT, ptSumAway/numAway, weight);
+      //  _hist_acptmax->fill(jetpT, ptMaxAway, weight);
+      //}
+    }
+    
+    
+    void finalize() {  
+      //
     }
-    //if (numAway > 0) {
-    //  _hist_acptave->fill(jetpT, ptSumAway/numAway, weight);
-    //  _hist_acptmax->fill(jetpT, ptMaxAway, weight);
-    //}
-  }
 
+    //@}
 
-  void STAR_2009_UE_HELEN::finalize() {  
-    //
-  }
+  private:
 
+    AIDA::IProfile1D *_hist_pnchg;
+    AIDA::IProfile1D *_hist_pmaxnchg;
+    AIDA::IProfile1D *_hist_pminnchg;
+    AIDA::IProfile1D *_hist_pdifnchg;
+    AIDA::IProfile1D *_hist_pcptsum;
+    AIDA::IProfile1D *_hist_pmaxcptsum;
+    AIDA::IProfile1D *_hist_pmincptsum;
+    AIDA::IProfile1D *_hist_pdifcptsum;
+    AIDA::IProfile1D *_hist_pcptave;
 
+  };
 
+  
+  
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<STAR_2009_UE_HELEN> plugin_STAR_2009_UE_HELEN;
-
+  
 }


More information about the Rivet-svn mailing list