[Rivet-svn] r1803 - in trunk: . src/Analyses src/Analyses/CDF src/Analyses/D0 src/Analyses/Example src/Analyses/HERA src/Analyses/LEP src/Analyses/MC src/Analyses/Misc src/Analyses/RHIC src/Analyses/SPS

blackhole at projects.hepforge.org blackhole at projects.hepforge.org
Wed Sep 2 11:16:22 BST 2009


Author: buckley
Date: Wed Sep  2 11:16:20 2009
New Revision: 1803

Log:
Moving analysis sources back into single directory, after a proletarian uprising ;)

Added:
   trunk/src/Analyses/ALEPH_1991_S2435284.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/ALEPH_1991_S2435284.cc
   trunk/src/Analyses/ALEPH_1996_S3486095.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/ALEPH_1996_S3486095.cc
   trunk/src/Analyses/CDF_1988_S1865951.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_1988_S1865951.cc
   trunk/src/Analyses/CDF_1990_S2089246.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_1990_S2089246.cc
   trunk/src/Analyses/CDF_1994_S2952106.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_1994_S2952106.cc
   trunk/src/Analyses/CDF_2000_S4155203.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2000_S4155203.cc
   trunk/src/Analyses/CDF_2001_S4751469.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2001_S4751469.cc
   trunk/src/Analyses/CDF_2002_S4796047.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2002_S4796047.cc
   trunk/src/Analyses/CDF_2004_S5839831.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2004_S5839831.cc
   trunk/src/Analyses/CDF_2005_S6080774.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2005_S6080774.cc
   trunk/src/Analyses/CDF_2005_S6217184.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2005_S6217184.cc
   trunk/src/Analyses/CDF_2006_S6450792.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2006_S6450792.cc
   trunk/src/Analyses/CDF_2006_S6653332.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2006_S6653332.cc
   trunk/src/Analyses/CDF_2007_S7057202.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2007_S7057202.cc
   trunk/src/Analyses/CDF_2008_LEADINGJETS.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_LEADINGJETS.cc
   trunk/src/Analyses/CDF_2008_NOTE_9351.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_NOTE_9351.cc
   trunk/src/Analyses/CDF_2008_S7540469.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_S7540469.cc
   trunk/src/Analyses/CDF_2008_S7541902.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_S7541902.cc
   trunk/src/Analyses/CDF_2008_S7782535.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_S7782535.cc
   trunk/src/Analyses/CDF_2008_S7828950.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_S7828950.cc
   trunk/src/Analyses/CDF_2008_S8093652.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_S8093652.cc
   trunk/src/Analyses/CDF_2008_S8095620.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2008_S8095620.cc
   trunk/src/Analyses/CDF_2009_S8057893.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2009_S8057893.cc
   trunk/src/Analyses/CDF_2009_S8233977.cc
      - copied unchanged from r1802, trunk/src/Analyses/CDF/CDF_2009_S8233977.cc
   trunk/src/Analyses/D0_1996_S3214044.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_1996_S3214044.cc
   trunk/src/Analyses/D0_1996_S3324664.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_1996_S3324664.cc
   trunk/src/Analyses/D0_2001_S4674421.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2001_S4674421.cc
   trunk/src/Analyses/D0_2004_S5992206.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2004_S5992206.cc
   trunk/src/Analyses/D0_2006_S6438750.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2006_S6438750.cc
   trunk/src/Analyses/D0_2007_S7075677.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2007_S7075677.cc
   trunk/src/Analyses/D0_2008_S6879055.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2008_S6879055.cc
   trunk/src/Analyses/D0_2008_S7554427.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2008_S7554427.cc
   trunk/src/Analyses/D0_2008_S7662670.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2008_S7662670.cc
   trunk/src/Analyses/D0_2008_S7719523.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2008_S7719523.cc
   trunk/src/Analyses/D0_2008_S7837160.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2008_S7837160.cc
   trunk/src/Analyses/D0_2008_S7863608.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2008_S7863608.cc
   trunk/src/Analyses/D0_2009_S8202443.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2009_S8202443.cc
   trunk/src/Analyses/D0_2009_S8320160.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2009_S8320160.cc
   trunk/src/Analyses/D0_2009_S8349509.cc
      - copied unchanged from r1802, trunk/src/Analyses/D0/D0_2009_S8349509.cc
   trunk/src/Analyses/DELPHI_1995_S3137023.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/DELPHI_1995_S3137023.cc
   trunk/src/Analyses/DELPHI_1996_S3430090.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/DELPHI_1996_S3430090.cc
   trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/DELPHI_2002_069_CONF_603.cc
   trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/DELPHI_2003_WUD_03_11.cc
   trunk/src/Analyses/E735_1998_S3905616.cc
      - copied unchanged from r1802, trunk/src/Analyses/Misc/E735_1998_S3905616.cc
   trunk/src/Analyses/ExampleAnalysis.cc
      - copied unchanged from r1802, trunk/src/Analyses/Example/ExampleAnalysis.cc
   trunk/src/Analyses/ExampleTree.cc
      - copied unchanged from r1802, trunk/src/Analyses/Example/ExampleTree.cc
   trunk/src/Analyses/H1_1994_S2919893.cc
      - copied unchanged from r1802, trunk/src/Analyses/HERA/H1_1994_S2919893.cc
   trunk/src/Analyses/H1_1995_S3167097.cc
      - copied unchanged from r1802, trunk/src/Analyses/HERA/H1_1995_S3167097.cc
   trunk/src/Analyses/H1_2000_S4129130.cc
      - copied unchanged from r1802, trunk/src/Analyses/HERA/H1_2000_S4129130.cc
   trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc
      - copied unchanged from r1802, trunk/src/Analyses/Misc/JADE_OPAL_2000_S4300807.cc
   trunk/src/Analyses/MC_JetAnalysis.cc
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_JetAnalysis.cc
   trunk/src/Analyses/MC_LHC_DIJET.cc
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_LHC_DIJET.cc
   trunk/src/Analyses/MC_LHC_LEADINGJETS.cc
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_LHC_LEADINGJETS.cc
   trunk/src/Analyses/MC_LHC_WANALYSIS.cc   (props changed)
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_LHC_WANALYSIS.cc
   trunk/src/Analyses/MC_LHC_ZANALYSIS.cc
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_LHC_ZANALYSIS.cc
   trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_TVT1960_PHOTONJETS.cc
   trunk/src/Analyses/MC_TVT1960_ZJETS.cc
      - copied unchanged from r1802, trunk/src/Analyses/MC/MC_TVT1960_ZJETS.cc
   trunk/src/Analyses/OPAL_1998_S3780481.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/OPAL_1998_S3780481.cc
   trunk/src/Analyses/OPAL_2004_S6132243.cc
      - copied unchanged from r1802, trunk/src/Analyses/LEP/OPAL_2004_S6132243.cc
   trunk/src/Analyses/PDG_Hadron_Multiplicities.cc
      - copied unchanged from r1802, trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities.cc
   trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc
      - copied unchanged from r1802, trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities_Ratios.cc
   trunk/src/Analyses/SFM_1984_S1178091.cc
      - copied unchanged from r1802, trunk/src/Analyses/Misc/SFM_1984_S1178091.cc
   trunk/src/Analyses/STAR_2006_S6870392.cc
      - copied unchanged from r1802, trunk/src/Analyses/RHIC/STAR_2006_S6870392.cc
   trunk/src/Analyses/STAR_2008_S7993412.cc
      - copied unchanged from r1802, trunk/src/Analyses/RHIC/STAR_2008_S7993412.cc
   trunk/src/Analyses/STAR_2009_UE_HELEN.cc
      - copied unchanged from r1802, trunk/src/Analyses/RHIC/STAR_2009_UE_HELEN.cc
   trunk/src/Analyses/UA1_1990_S2044935.cc
      - copied unchanged from r1802, trunk/src/Analyses/SPS/UA1_1990_S2044935.cc
   trunk/src/Analyses/UA5_1982_S875503.cc
      - copied unchanged from r1802, trunk/src/Analyses/SPS/UA5_1982_S875503.cc
   trunk/src/Analyses/UA5_1986_S1583476.cc
      - copied unchanged from r1802, trunk/src/Analyses/SPS/UA5_1986_S1583476.cc
   trunk/src/Analyses/UA5_1988_S1867512.cc
      - copied unchanged from r1802, trunk/src/Analyses/SPS/UA5_1988_S1867512.cc
   trunk/src/Analyses/UA5_1989_S1926373.cc
      - copied unchanged from r1802, trunk/src/Analyses/SPS/UA5_1989_S1926373.cc
   trunk/src/Analyses/ZEUS_2001_S4815815.cc
      - copied unchanged from r1802, trunk/src/Analyses/HERA/ZEUS_2001_S4815815.cc
Deleted:
   trunk/src/Analyses/CDF/CDF_1988_S1865951.cc
   trunk/src/Analyses/CDF/CDF_1990_S2089246.cc
   trunk/src/Analyses/CDF/CDF_1994_S2952106.cc
   trunk/src/Analyses/CDF/CDF_2000_S4155203.cc
   trunk/src/Analyses/CDF/CDF_2001_S4751469.cc
   trunk/src/Analyses/CDF/CDF_2002_S4796047.cc
   trunk/src/Analyses/CDF/CDF_2004_S5839831.cc
   trunk/src/Analyses/CDF/CDF_2005_S6080774.cc
   trunk/src/Analyses/CDF/CDF_2005_S6217184.cc
   trunk/src/Analyses/CDF/CDF_2006_S6450792.cc
   trunk/src/Analyses/CDF/CDF_2006_S6653332.cc
   trunk/src/Analyses/CDF/CDF_2007_S7057202.cc
   trunk/src/Analyses/CDF/CDF_2008_LEADINGJETS.cc
   trunk/src/Analyses/CDF/CDF_2008_NOTE_9351.cc
   trunk/src/Analyses/CDF/CDF_2008_S7540469.cc
   trunk/src/Analyses/CDF/CDF_2008_S7541902.cc
   trunk/src/Analyses/CDF/CDF_2008_S7782535.cc
   trunk/src/Analyses/CDF/CDF_2008_S7828950.cc
   trunk/src/Analyses/CDF/CDF_2008_S8093652.cc
   trunk/src/Analyses/CDF/CDF_2008_S8095620.cc
   trunk/src/Analyses/CDF/CDF_2009_S8057893.cc
   trunk/src/Analyses/CDF/CDF_2009_S8233977.cc
   trunk/src/Analyses/D0/D0_1996_S3214044.cc
   trunk/src/Analyses/D0/D0_1996_S3324664.cc
   trunk/src/Analyses/D0/D0_2001_S4674421.cc
   trunk/src/Analyses/D0/D0_2004_S5992206.cc
   trunk/src/Analyses/D0/D0_2006_S6438750.cc
   trunk/src/Analyses/D0/D0_2007_S7075677.cc
   trunk/src/Analyses/D0/D0_2008_S6879055.cc
   trunk/src/Analyses/D0/D0_2008_S7554427.cc
   trunk/src/Analyses/D0/D0_2008_S7662670.cc
   trunk/src/Analyses/D0/D0_2008_S7719523.cc
   trunk/src/Analyses/D0/D0_2008_S7837160.cc
   trunk/src/Analyses/D0/D0_2008_S7863608.cc
   trunk/src/Analyses/D0/D0_2009_S8202443.cc
   trunk/src/Analyses/D0/D0_2009_S8320160.cc
   trunk/src/Analyses/D0/D0_2009_S8349509.cc
   trunk/src/Analyses/Example/ExampleAnalysis.cc
   trunk/src/Analyses/Example/ExampleTree.cc
   trunk/src/Analyses/HERA/H1_1994_S2919893.cc
   trunk/src/Analyses/HERA/H1_1995_S3167097.cc
   trunk/src/Analyses/HERA/H1_2000_S4129130.cc
   trunk/src/Analyses/HERA/ZEUS_2001_S4815815.cc
   trunk/src/Analyses/LEP/ALEPH_1991_S2435284.cc
   trunk/src/Analyses/LEP/ALEPH_1996_S3486095.cc
   trunk/src/Analyses/LEP/DELPHI_1995_S3137023.cc
   trunk/src/Analyses/LEP/DELPHI_1996_S3430090.cc
   trunk/src/Analyses/LEP/DELPHI_2002_069_CONF_603.cc
   trunk/src/Analyses/LEP/DELPHI_2003_WUD_03_11.cc
   trunk/src/Analyses/LEP/OPAL_1998_S3780481.cc
   trunk/src/Analyses/LEP/OPAL_2004_S6132243.cc
   trunk/src/Analyses/MC/MC_JetAnalysis.cc
   trunk/src/Analyses/MC/MC_LHC_DIJET.cc
   trunk/src/Analyses/MC/MC_LHC_LEADINGJETS.cc
   trunk/src/Analyses/MC/MC_LHC_WANALYSIS.cc
   trunk/src/Analyses/MC/MC_LHC_ZANALYSIS.cc
   trunk/src/Analyses/MC/MC_TVT1960_PHOTONJETS.cc
   trunk/src/Analyses/MC/MC_TVT1960_ZJETS.cc
   trunk/src/Analyses/Misc/E735_1998_S3905616.cc
   trunk/src/Analyses/Misc/JADE_OPAL_2000_S4300807.cc
   trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities.cc
   trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities_Ratios.cc
   trunk/src/Analyses/Misc/SFM_1984_S1178091.cc
   trunk/src/Analyses/RHIC/STAR_2006_S6870392.cc
   trunk/src/Analyses/RHIC/STAR_2008_S7993412.cc
   trunk/src/Analyses/RHIC/STAR_2009_UE_HELEN.cc
   trunk/src/Analyses/SPS/UA1_1990_S2044935.cc
   trunk/src/Analyses/SPS/UA5_1982_S875503.cc
   trunk/src/Analyses/SPS/UA5_1986_S1583476.cc
   trunk/src/Analyses/SPS/UA5_1988_S1867512.cc
   trunk/src/Analyses/SPS/UA5_1989_S1926373.cc
Modified:
   trunk/ChangeLog
   trunk/configure.ac
   trunk/src/Analyses/Makefile.am

Modified: trunk/ChangeLog
==============================================================================
--- trunk/ChangeLog	Wed Sep  2 09:03:31 2009	(r1802)
+++ trunk/ChangeLog	Wed Sep  2 11:16:20 2009	(r1803)
@@ -1,3 +1,8 @@
+2009-09-02  Andy Buckley  <andy at insectnation.org>
+
+	* Moving analysis sources back into single directory, after a
+	proletarian uprising ;)
+
 2009-09-01  Andy Buckley  <andy at insectnation.org>
 
 	* Adding WFinder and WAnalysis, based on Z proj and analysis, with

Modified: trunk/configure.ac
==============================================================================
--- trunk/configure.ac	Wed Sep  2 09:03:31 2009	(r1802)
+++ trunk/configure.ac	Wed Sep  2 11:16:20 2009	(r1803)
@@ -237,11 +237,6 @@
 AC_CONFIG_FILES(src/Tools/Makefile src/Tools/yaml-cpp/Makefile)
 AC_CONFIG_FILES(src/Projections/Makefile)
 AC_CONFIG_FILES(src/Analyses/Makefile)
-AC_CONFIG_FILES(src/Analyses/Example/Makefile)
-AC_CONFIG_FILES(src/Analyses/CDF/Makefile src/Analyses/D0/Makefile)
-AC_CONFIG_FILES(src/Analyses/HERA/Makefile src/Analyses/SPS/Makefile)
-AC_CONFIG_FILES(src/Analyses/RHIC/Makefile src/Analyses/LEP/Makefile)
-AC_CONFIG_FILES(src/Analyses/Misc/Makefile src/Analyses/MC/Makefile)
 AC_CONFIG_FILES(src/Test/Makefile)
 AC_CONFIG_FILES(pyext/Makefile)
 AC_CONFIG_FILES(pyext/setup.py)

Copied: trunk/src/Analyses/ALEPH_1991_S2435284.cc (from r1802, trunk/src/Analyses/LEP/ALEPH_1991_S2435284.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/ALEPH_1991_S2435284.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/ALEPH_1991_S2435284.cc)
@@ -0,0 +1,70 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/Multiplicity.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of ALEPH LEP1 charged multiplicity
+  /// @author Andy Buckley
+  class ALEPH_1991_S2435284 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+    
+    /// Constructor.
+    ALEPH_1991_S2435284() 
+      : Analysis("ALEPH_1991_S2435284")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+      addProjection(Multiplicity(cfs), "Mult");
+    }
+
+    //@}  
+
+  
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histogram
+    void init() { 
+      _histChTot = bookHistogram1D(1, 1, 1);
+    }
+
+
+    /// Do the analysis
+    void analyze(const Event& event) {
+      const Multiplicity& m = applyProjection<Multiplicity>(event, "Mult");
+      getLog() << Log::DEBUG << "Total charged multiplicity = " << m.totalMultiplicity() << endl;
+      _histChTot->fill(m.totalMultiplicity(), event.weight());
+    }
+
+
+    /// Normalize the histogram
+    void finalize() {
+      scale(_histChTot, 2.0/sumOfWeights()); // same as in ALEPH 1996
+    }
+
+    //@}  
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _histChTot;
+    //@}
+
+  };    
+
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<ALEPH_1991_S2435284> plugin_ALEPH_1991_S2435284;
+  
+}

Copied: trunk/src/Analyses/ALEPH_1996_S3486095.cc (from r1802, trunk/src/Analyses/LEP/ALEPH_1996_S3486095.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/ALEPH_1996_S3486095.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/ALEPH_1996_S3486095.cc)
@@ -0,0 +1,557 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/Sphericity.hh"
+#include "Rivet/Projections/Thrust.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/ParisiTensor.hh"
+#include "Rivet/Projections/Hemispheres.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/UnstableFinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief ALEPH QCD study with event shapes and identified particles
+  /// @author Holger Schulz
+  class ALEPH_1996_S3486095 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    ALEPH_1996_S3486095() 
+      : Analysis("ALEPH_1996_S3486095")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+      addProjection(UnstableFinalState(), "UFS");
+      addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
+      addProjection(Sphericity(cfs), "Sphericity");
+      addProjection(ParisiTensor(cfs), "Parisi");
+      const Thrust thrust(cfs);
+      addProjection(thrust, "Thrust");
+      addProjection(Hemispheres(thrust), "Hemispheres");
+      _numChParticles               = 0;
+      _weightedTotalPartNum         = 0;
+      _weightedTotalNumPiPlus       = 0;       
+      _weightedTotalNumKPlus        = 0;      
+      _weightedTotalNumP            = 0;     
+      _weightedTotalNumPhoton       = 0;    
+      _weightedTotalNumPi0          = 0;   
+      _weightedTotalNumEta          = 0;  
+      _weightedTotalNumEtaPrime     = 0; 
+      _weightedTotalNumK0           = 0;
+      _weightedTotalNumLambda0      = 0;
+      _weightedTotalNumXiMinus      = 0;
+      _weightedTotalNumSigma1385Plus= 0;
+      _weightedTotalNumXi1530_0     = 0;
+      _weightedTotalNumRho          = 0;
+      _weightedTotalNumOmega782     = 0;
+      _weightedTotalNumKStar892_0   = 0;
+      _weightedTotalNumPhi          = 0;
+      _weightedTotalNumKStar892Plus = 0;
+    }
+
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      _weightedTotalPartNum += numParticles * weight;
+
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+
+      // Thrusts
+      getLog() << Log::DEBUG << "Calculating thrust" << endl;
+      const Thrust& thrust = applyProjection<Thrust>(e, "Thrust");
+      _hist1MinusT->fill(1 - thrust.thrust(), weight); 
+      _histTMinor->fill(thrust.thrustMinor(), weight); 
+      _histOblateness->fill(thrust.oblateness(), weight);
+
+      // Jets
+      getLog() << Log::DEBUG << "Calculating differential jet rate plots:" << endl;
+      const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
+      if (durjet.clusterSeq()) {
+        double y3 = durjet.clusterSeq()->exclusive_ymerge(2);
+        _histY3->fill(-1. * std::log(y3), weight);
+      }
+
+      // Sphericities
+      getLog() << Log::DEBUG << "Calculating sphericity" << endl;
+      const Sphericity& sphericity = applyProjection<Sphericity>(e, "Sphericity");
+      _histSphericity->fill(sphericity.sphericity(), weight); 
+      _histAplanarity->fill(sphericity.aplanarity(), weight); 
+
+      // C param
+      getLog() << Log::DEBUG << "Calculating Parisi params" << endl;
+      const ParisiTensor& parisi = applyProjection<ParisiTensor>(e, "Parisi");
+      _histCParam->fill(parisi.C(), weight);
+
+      // Hemispheres
+      getLog() << Log::DEBUG << "Calculating hemisphere variables" << endl;
+      const Hemispheres& hemi = applyProjection<Hemispheres>(e, "Hemispheres");
+      _histHeavyJetMass->fill(hemi.getScaledM2high(), weight);
+
+      // Iterate over all the charged final state particles.
+      double Evis = 0.0;
+      double rapt05 = 0.;
+      double rapt10 = 0.;
+      double rapt15 = 0.;
+      double rapt20 = 0.;
+      //int numChParticles = 0;
+      getLog() << Log::DEBUG << "About to iterate over charged FS particles" << endl;
+      for (ParticleVector::const_iterator p = fs.particles().begin(); p != fs.particles().end(); ++p) {
+        // Get momentum and energy of each particle.
+        const Vector3 mom3 = p->momentum().vector3();
+        const double energy = p->momentum().E();
+        Evis += energy;
+        _numChParticles += weight;
+
+        // Scaled momenta.
+        const double mom = mom3.mod();
+        const double scaledMom = mom/meanBeamMom;
+        const double logInvScaledMom = -std::log(scaledMom);
+        _histLogScaledMom->fill(logInvScaledMom, weight); 
+        _histScaledMom->fill(scaledMom, weight); 
+
+        // Get momenta components w.r.t. thrust and sphericity.
+        const double momT = dot(thrust.thrustAxis(), mom3);
+        const double pTinS = dot(mom3, sphericity.sphericityMajorAxis());
+        const double pToutS = dot(mom3, sphericity.sphericityMinorAxis());
+        _histPtSIn->fill(fabs(pTinS/GeV), weight);
+        _histPtSOut->fill(fabs(pToutS/GeV), weight);
+
+        // Calculate rapidities w.r.t. thrust.
+        const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
+        _histRapidityT->fill(rapidityT, weight);
+        if (std::fabs(rapidityT) <= .5)  {
+            rapt05 += 1.;
+        }
+        if (std::fabs(rapidityT) <= 1.)  {
+            rapt10 += 1.;
+        }
+        if (std::fabs(rapidityT) <= 1.5) {
+            rapt15 += 1.;
+        }
+        if (std::fabs(rapidityT) <= 2.)  {
+            rapt20 += 1.;
+        } 
+
+      }
+
+      _histChMult->fill(numParticles, weight);
+
+      _histMeanChMultRapt05->fill(_histMeanChMultRapt05->binMean(0), rapt05 * weight);
+      _histMeanChMultRapt10->fill(_histMeanChMultRapt10->binMean(0), rapt10 * weight);
+      _histMeanChMultRapt15->fill(_histMeanChMultRapt15->binMean(0), rapt15 * weight);
+      _histMeanChMultRapt20->fill(_histMeanChMultRapt20->binMean(0), rapt20 * weight);
+      _histMeanChMult->fill(_histMeanChMult->binMean(0), numParticles*weight);
+
+
+      //// Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+      for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+        const Vector3 mom3 = p->momentum().vector3();
+        int id = abs(p->pdgId());
+        const double mom = mom3.mod();
+        const double energy = p->momentum().E();
+        const double scaledMom = mom/meanBeamMom;
+        const double scaledEnergy = energy/meanBeamMom;  // meanBeamMom is approximately beam energy
+        switch (id) {
+           case 22: 
+              _histMultiPhoton->fill(-1.*std::log(scaledMom), weight);
+              _weightedTotalNumPhoton += weight;
+              break;
+           case -321:
+           case 321:
+              _weightedTotalNumKPlus += weight;
+              _histMultiKPlus->fill(scaledMom, weight);
+              break;
+           case 211:
+           case -211:
+              _histMultiPiPlus->fill(scaledMom, weight);
+              _weightedTotalNumPiPlus += weight;
+              break;
+           case 2212:
+           case -2212:
+              _histMultiP->fill(scaledMom, weight);
+              _weightedTotalNumP += weight;
+              break;
+           case 111:
+              _histMultiPi0->fill(scaledMom, weight);
+              _histMeanMultiPi0->fill(_histMeanMultiPi0->binMean(0), weight);
+              _weightedTotalNumPi0 += weight;
+              break;
+           case 221:
+              _histMultiEta->fill(scaledEnergy, weight);
+              _histMeanMultiEta->fill(_histMeanMultiEta->binMean(0), weight);
+              _weightedTotalNumEta += weight;
+              break;
+           case 331:
+              _histMultiEtaPrime->fill(scaledEnergy, weight);
+              _histMeanMultiEtaPrime->fill(_histMeanMultiEtaPrime->binMean(0), weight);
+              _weightedTotalNumEtaPrime += weight;
+              break;
+           case 130: //klong
+           case 310: //kshort
+              _histMultiK0->fill(scaledMom, weight);
+              _histMeanMultiK0->fill(_histMeanMultiK0->binMean(0), weight);
+              _weightedTotalNumK0 += weight;
+              break;
+           case 113:
+              _histMultiRho->fill(scaledMom, weight);
+              _histMeanMultiRho->fill(_histMeanMultiRho->binMean(0), weight);
+              _weightedTotalNumRho += weight;
+              break;
+           case 223:
+              _histMultiOmega782->fill(scaledMom, weight);
+              _histMeanMultiOmega782->fill(_histMeanMultiOmega782->binMean(0), weight);
+              _weightedTotalNumOmega782 += weight;
+              break;
+           case 333:
+              _histMultiPhi->fill(scaledMom, weight);
+              _histMeanMultiPhi->fill(_histMeanMultiPhi->binMean(0), weight);
+              _weightedTotalNumPhi += weight;
+              break;
+           case 313:
+           case -313:
+              _histMultiKStar892_0->fill(scaledMom, weight);
+              _histMeanMultiKStar892_0->fill(_histMeanMultiKStar892_0->binMean(0), weight);
+              _weightedTotalNumKStar892_0 += weight;
+              break;
+           case 323:
+           case -323:
+              _histMultiKStar892Plus->fill(scaledEnergy, weight);
+              _histMeanMultiKStar892Plus->fill(_histMeanMultiKStar892Plus->binMean(0), weight);
+              _weightedTotalNumKStar892Plus += weight;
+              break;
+           case 3122:
+           case -3122:
+              _histMultiLambda0->fill(scaledMom, weight);
+              _histMeanMultiLambda0->fill(_histMeanMultiLambda0->binMean(0), weight);
+              _weightedTotalNumLambda0 += weight;
+              break;
+           case 3212:
+           case -3212:
+              _histMeanMultiSigma0->fill(_histMeanMultiSigma0->binMean(0), weight);
+           case 3312:
+           case -3312:
+              _histMultiXiMinus->fill(scaledEnergy, weight);
+              _histMeanMultiXiMinus->fill(_histMeanMultiXiMinus->binMean(0), weight);
+              _weightedTotalNumXiMinus += weight;
+              break;
+           case 3114:
+           case -3114: //maybe missing sigma(1385p13)
+              _histMultiSigma1385Plus->fill(scaledEnergy, weight);
+              _histMeanMultiSigma1385Plus->fill(_histMeanMultiSigma1385Plus->binMean(0), weight);
+              _weightedTotalNumSigma1385Plus += weight;
+              break;
+           case 3324:
+           case -3324:
+              _histMultiXi1530_0->fill(scaledEnergy, weight);
+              _histMeanMultiXi1530_0->fill(_histMeanMultiXi1530_0->binMean(0), weight);
+              _weightedTotalNumXi1530_0 += weight;
+              break;
+           case 3334:
+              _histMeanMultiOmegaOmegaBar->fill(_histMeanMultiOmegaOmegaBar->binMean(0), weight);
+              break;
+        }
+      }
+
+    }
+
+
+    void init() {
+      _histSphericity   = bookHistogram1D(1, 1, 1);
+      _histAplanarity   = bookHistogram1D(2, 1, 1);
+
+      _hist1MinusT      = bookHistogram1D(3, 1, 1);
+      _histTMinor       = bookHistogram1D(4, 1, 1);
+
+      _histY3           = bookHistogram1D(5, 1, 1);
+      _histHeavyJetMass = bookHistogram1D(6, 1, 1); 
+      _histCParam       = bookHistogram1D(7, 1, 1); 
+      _histOblateness   = bookHistogram1D(8, 1, 1); 
+
+      _histScaledMom    = bookHistogram1D(9, 1, 1); 
+      _histRapidityT    = bookHistogram1D(10, 1, 1); 
+
+      _histPtSIn        = bookHistogram1D(11, 1, 1); 
+      _histPtSOut       = bookHistogram1D(12, 1, 1); 
+
+      _histLogScaledMom = bookHistogram1D(17, 1, 1); 
+
+      _histChMult       = bookHistogram1D(18, 1, 1); 
+      _histMeanChMult   = bookHistogram1D(19, 1, 1); 
+
+      _histMeanChMultRapt05= bookHistogram1D(20, 1, 1); 
+      _histMeanChMultRapt10= bookHistogram1D(21, 1, 1); 
+      _histMeanChMultRapt15= bookHistogram1D(22, 1, 1); 
+      _histMeanChMultRapt20= bookHistogram1D(23, 1, 1); 
+
+
+      // Particle spectra
+      _histMultiPiPlus        = bookHistogram1D(25, 1, 1); 
+      _histMultiKPlus         = bookHistogram1D(26, 1, 1); 
+      _histMultiP             = bookHistogram1D(27, 1, 1); 
+      _histMultiPhoton        = bookHistogram1D(28, 1, 1); 
+      _histMultiPi0           = bookHistogram1D(29, 1, 1); 
+      _histMultiEta           = bookHistogram1D(30, 1, 1); 
+      _histMultiEtaPrime      = bookHistogram1D(31, 1, 1); 
+      _histMultiK0            = bookHistogram1D(32, 1, 1); 
+      _histMultiLambda0       = bookHistogram1D(33, 1, 1); 
+      _histMultiXiMinus       = bookHistogram1D(34, 1, 1); 
+      _histMultiSigma1385Plus = bookHistogram1D(35, 1, 1); 
+      _histMultiXi1530_0      = bookHistogram1D(36, 1, 1); 
+      _histMultiRho           = bookHistogram1D(37, 1, 1); 
+      _histMultiOmega782      = bookHistogram1D(38, 1, 1); 
+      _histMultiKStar892_0    = bookHistogram1D(39, 1, 1); 
+      _histMultiPhi           = bookHistogram1D(40, 1, 1); 
+
+      _histMultiKStar892Plus  = bookHistogram1D(43, 1, 1); 
+
+      // Mean multiplicities 
+      _histMeanMultiPi0           = bookHistogram1D(44, 1,  2);
+      _histMeanMultiEta           = bookHistogram1D(44, 1,  3);
+      _histMeanMultiEtaPrime      = bookHistogram1D(44, 1,  4);
+      _histMeanMultiK0            = bookHistogram1D(44, 1,  5);
+      _histMeanMultiRho           = bookHistogram1D(44, 1,  6);
+      _histMeanMultiOmega782      = bookHistogram1D(44, 1,  7);
+      _histMeanMultiPhi           = bookHistogram1D(44, 1,  8);
+      _histMeanMultiKStar892Plus  = bookHistogram1D(44, 1,  9);
+      _histMeanMultiKStar892_0    = bookHistogram1D(44, 1, 10);
+      _histMeanMultiLambda0       = bookHistogram1D(44, 1, 11);
+      _histMeanMultiSigma0        = bookHistogram1D(44, 1, 12);
+      _histMeanMultiXiMinus       = bookHistogram1D(44, 1, 13);
+      _histMeanMultiSigma1385Plus = bookHistogram1D(44, 1, 14);
+      _histMeanMultiXi1530_0      = bookHistogram1D(44, 1, 15);
+      _histMeanMultiOmegaOmegaBar = bookHistogram1D(44, 1, 16);
+    }
+
+
+
+    /// Finalize: convert the weighted histogram sums into per-event
+    /// (and, for the inclusive spectra, per-particle) distributions.
+    void finalize() { 
+      // Normalise the inclusive single-particle distributions to the
+      // average number of charged particles per event.
+      const double avgNumParts = _weightedTotalPartNum / sumOfWeights();
+      normalize(_histPtSIn, avgNumParts);
+      normalize(_histPtSOut, avgNumParts);
+      normalize(_histRapidityT, avgNumParts);
+      normalize(_histLogScaledMom, avgNumParts);
+      normalize(_histScaledMom, avgNumParts);
+      normalize(_histY3);
+
+      // Reciprocal of the event-weight sum, applied to every per-event yield.
+      const double invSumW = 1.0/sumOfWeights();
+
+      // Identified-particle spectra: scale to yields per event.
+      scale(_histMultiPiPlus,        invSumW);
+      scale(_histMultiKPlus,         invSumW);
+      scale(_histMultiP,             invSumW);
+      scale(_histMultiPhoton,        invSumW);
+      scale(_histMultiPi0,           invSumW);
+      scale(_histMultiEta,           invSumW);
+      scale(_histMultiEtaPrime,      invSumW);
+      scale(_histMultiK0,            invSumW);
+      scale(_histMultiLambda0,       invSumW);
+      scale(_histMultiXiMinus,       invSumW);
+      scale(_histMultiSigma1385Plus, invSumW);
+      scale(_histMultiXi1530_0,      invSumW);
+      scale(_histMultiRho,           invSumW);
+      scale(_histMultiOmega782,      invSumW);
+      scale(_histMultiKStar892_0,    invSumW);
+      scale(_histMultiPhi,           invSumW);
+      scale(_histMultiKStar892Plus,  invSumW);
+
+      // Event-shape distributions: normalised to unit area.
+      normalize(_hist1MinusT);
+      normalize(_histTMinor);
+      normalize(_histOblateness);
+      normalize(_histSphericity);
+      normalize(_histAplanarity);
+      normalize(_histHeavyJetMass);
+      normalize(_histCParam);
+
+      // Charged multiplicity: the factor 2 accounts for the bin width of 2.
+      scale(_histChMult, 2.0/sumOfWeights());
+
+      // Mean multiplicities per event.
+      scale(_histMeanChMult,       invSumW);
+      scale(_histMeanChMultRapt05, invSumW);
+      scale(_histMeanChMultRapt10, invSumW);
+      scale(_histMeanChMultRapt15, invSumW);
+      scale(_histMeanChMultRapt20, invSumW);
+      scale(_histMeanMultiPi0,           invSumW);
+      scale(_histMeanMultiEta,           invSumW);
+      scale(_histMeanMultiEtaPrime,      invSumW);
+      scale(_histMeanMultiK0,            invSumW);
+      scale(_histMeanMultiRho,           invSumW);
+      scale(_histMeanMultiOmega782,      invSumW);
+      scale(_histMeanMultiPhi,           invSumW);
+      scale(_histMeanMultiKStar892Plus,  invSumW);
+      scale(_histMeanMultiKStar892_0,    invSumW);
+      scale(_histMeanMultiLambda0,       invSumW);
+      scale(_histMeanMultiSigma0,        invSumW);
+      scale(_histMeanMultiXiMinus,       invSumW);
+      scale(_histMeanMultiSigma1385Plus, invSumW);
+      scale(_histMeanMultiXi1530_0,      invSumW);
+      scale(_histMeanMultiOmegaOmegaBar, invSumW);
+    }
+
+    //@}
+
+
+  private:
+    /// Store the weighted sums of numbers of charged / charged+neutral
+    /// particles - used to calculate average number of particles for the 
+    /// inclusive single particle distributions' normalisations.
+    double _weightedTotalPartNum;
+    // Weighted per-species particle yields (one counter per identified species;
+    // referenced only by the commented-out alternative normalisation in finalize()).
+    double _weightedTotalNumPiPlus;
+    double _weightedTotalNumKPlus;
+    double _weightedTotalNumP;
+    double _weightedTotalNumPhoton;
+    double _weightedTotalNumPi0;
+    double _weightedTotalNumEta;
+    double _weightedTotalNumEtaPrime;
+    double _weightedTotalNumK0;
+    double _weightedTotalNumLambda0;
+    double _weightedTotalNumXiMinus;
+    double _weightedTotalNumSigma1385Plus;
+    double _weightedTotalNumXi1530_0;
+    double _weightedTotalNumRho;
+    double _weightedTotalNumOmega782;
+    double _weightedTotalNumKStar892_0;
+    double _weightedTotalNumPhi;
+    double _weightedTotalNumKStar892Plus;
+    // Weighted charged-particle count.
+    // NOTE(review): not referenced in the visible part of this class - confirm use.
+    double _numChParticles;
+
+    /// @name Histograms
+    //@{
+    // Event-shape distributions (normalised to unit area in finalize()).
+    AIDA::IHistogram1D *_histSphericity;
+    AIDA::IHistogram1D *_histAplanarity;
+
+    AIDA::IHistogram1D *_hist1MinusT; 
+    AIDA::IHistogram1D *_histTMinor; 
+    
+    AIDA::IHistogram1D *_histY3;
+    AIDA::IHistogram1D *_histHeavyJetMass;
+    AIDA::IHistogram1D *_histCParam;
+    AIDA::IHistogram1D *_histOblateness; 
+    
+    // Inclusive single-particle spectra (normalised to <N_ch> in finalize()).
+    AIDA::IHistogram1D *_histScaledMom; 
+    AIDA::IHistogram1D *_histRapidityT;
+
+    AIDA::IHistogram1D *_histPtSIn;
+    AIDA::IHistogram1D *_histPtSOut;
+    
+    // Durham jet rates.
+    // NOTE(review): these are not scaled/normalised in finalize() - confirm intended.
+    AIDA::IHistogram1D *_histJetRate2Durham;
+    AIDA::IHistogram1D *_histJetRate3Durham;
+    AIDA::IHistogram1D *_histJetRate4Durham;
+    AIDA::IHistogram1D *_histJetRate5Durham;
+   
+    AIDA::IHistogram1D *_histLogScaledMom;
+    
+    
+    // Charged-multiplicity distribution (bin width 2; see finalize()).
+    AIDA::IHistogram1D *_histChMult;
+    
+
+    // Identified-particle momentum spectra (scaled to per-event yields in finalize()).
+    AIDA::IHistogram1D *_histMultiPiPlus;
+    AIDA::IHistogram1D *_histMultiKPlus;
+    AIDA::IHistogram1D *_histMultiP;
+    AIDA::IHistogram1D *_histMultiPhoton;
+    AIDA::IHistogram1D *_histMultiPi0;
+    AIDA::IHistogram1D *_histMultiEta;
+    AIDA::IHistogram1D *_histMultiEtaPrime;
+    AIDA::IHistogram1D *_histMultiK0;
+    AIDA::IHistogram1D *_histMultiLambda0;
+    AIDA::IHistogram1D *_histMultiXiMinus;
+    AIDA::IHistogram1D *_histMultiSigma1385Plus;
+    AIDA::IHistogram1D *_histMultiXi1530_0;
+    AIDA::IHistogram1D *_histMultiRho;
+    AIDA::IHistogram1D *_histMultiOmega782;
+    AIDA::IHistogram1D *_histMultiKStar892_0;
+    AIDA::IHistogram1D *_histMultiPhi;
+    AIDA::IHistogram1D *_histMultiKStar892Plus;
+   
+    // mean multiplicities
+    AIDA::IHistogram1D *_histMeanChMult;
+    AIDA::IHistogram1D *_histMeanChMultRapt05;
+    AIDA::IHistogram1D *_histMeanChMultRapt10;
+    AIDA::IHistogram1D *_histMeanChMultRapt15;
+    AIDA::IHistogram1D *_histMeanChMultRapt20;
+    
+    AIDA::IHistogram1D *_histMeanMultiPi0;          
+    AIDA::IHistogram1D *_histMeanMultiEta;          
+    AIDA::IHistogram1D *_histMeanMultiEtaPrime;     
+    AIDA::IHistogram1D *_histMeanMultiK0;           
+    AIDA::IHistogram1D *_histMeanMultiRho;          
+    AIDA::IHistogram1D *_histMeanMultiOmega782;        
+    AIDA::IHistogram1D *_histMeanMultiPhi;         
+    AIDA::IHistogram1D *_histMeanMultiKStar892Plus; 
+    AIDA::IHistogram1D *_histMeanMultiKStar892_0;   
+    AIDA::IHistogram1D *_histMeanMultiLambda0;      
+    AIDA::IHistogram1D *_histMeanMultiSigma0;       
+    AIDA::IHistogram1D *_histMeanMultiXiMinus;      
+    AIDA::IHistogram1D *_histMeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_histMeanMultiXi1530_0;     
+    AIDA::IHistogram1D *_histMeanMultiOmegaOmegaBar;        
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<ALEPH_1996_S3486095> plugin_ALEPH_1996_S3486095;
+
+}

Copied: trunk/src/Analyses/CDF_1988_S1865951.cc (from r1802, trunk/src/Analyses/CDF/CDF_1988_S1865951.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_1988_S1865951.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_1988_S1865951.cc)
@@ -0,0 +1,115 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/Beam.hh"
+
+namespace Rivet {
+
+
+  /// @brief CDF inclusive charged-particle pT spectra at 630 and 1800 GeV
+  class CDF_1988_S1865951 : public Analysis {
+
+  public:
+
+    /// @name Constructor etc.
+    //@{
+
+    /// Constructor: book the projections for the trigger emulation and
+    /// for the measured track selection.
+    CDF_1988_S1865951() 
+      : Analysis("CDF_1988_S1865951") 
+    {
+      // Tracks entering the pT spectra: |eta| < 1, pT > 0.4 GeV
+      const ChargedFinalState cfs(-1.,1., 0.4*GeV);
+      addProjection(cfs, "CFS");
+      // Wide acceptance used to emulate the BBC trigger and VTPC selection
+      addProjection(ChargedFinalState(-5.9, 5.9), "CFSAll");
+      addProjection(TotalVisibleMomentum(cfs), "Mom");
+      addProjection(Beam(), "Beam");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book the pT histograms for the two beam energies.
+    void init() { 
+      _hist_pt1800 = bookHistogram1D(1, 1, 1);
+      _hist_pt630 = bookHistogram1D(2, 1, 1);
+    }
+    
+    
+    /// Apply the min-bias trigger emulation and fill the pT spectra.
+    void analyze(const Event& event) {
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
+      const FinalState& fs = applyProjection<ChargedFinalState>(event, "CFS");
+      const double weight = event.weight();
+      
+      // Minimum Bias trigger requirements from the BBC counters
+      int n_trig_1 = 0;
+      int n_trig_2 = 0;
+      
+      // Event selection based on tracks in VTPC (time projection chambers)
+      // Require at least 4 tracks with at least one in each of the forward
+      // and backward hemispheres
+      int n_backward = 0;
+      int n_forward = 0;
+      
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFSAll");
+      foreach (const Particle& p, cfs.particles()) {
+        double eta = p.momentum().pseudorapidity();
+        if (inRange(eta, -5.9, -3.2)) n_trig_1 += 1;
+        else if (inRange(eta, 3.2, 5.9)) n_trig_2 += 1;
+        
+        if (inRange(eta, -3.0, 0.0)) n_backward += 1;
+        else if (inRange(eta, 0.0, 3.0)) n_forward += 1;
+      }
+      
+      // Require at least one coincidence hit in both BBC counters
+      if (n_trig_1 == 0 || n_trig_2 == 0) vetoEvent; 
+      getLog() << Log::DEBUG << "Trigger 1: " << n_trig_1 << " Trigger 2: " << n_trig_2 << endl;
+      
+      // Further event selection cut
+      if (n_backward + n_forward < 4 || n_backward == 0 || n_forward == 0) vetoEvent;
+      getLog() << Log::DEBUG << " Num. forward: " << n_forward  << ", Num. backward: " << n_backward << endl;
+      
+      // Const reference avoids copying each Particle (was a by-value loop)
+      foreach (const Particle& p, fs.particles()) {
+        const double pt = p.momentum().pT();
+        // Effective weight for d3sig/dp3 = weight / ( Delta eta * 2pi * pt ), with Delta(eta) = 2
+        const double eff_weight = weight/(2*TWOPI*pt);
+        // Compare the dimensionless ratio sqrtS/GeV, matching the sibling
+        // CDF_1990_S2089246 analysis (numerically identical for GeV == 1)
+        if (fuzzyEquals(sqrtS/GeV, 630)) {
+          _hist_pt630->fill(pt, eff_weight);
+        } else if (fuzzyEquals(sqrtS/GeV, 1800)) {
+          _hist_pt1800->fill(pt, eff_weight);
+        }
+      }
+    }
+    
+    
+    /// Scale histos to the (hard-coded) measured cross-sections.
+    void finalize() {
+      /// @todo Total cross section hard-coded, needs a way to pass variable from generator
+      scale(_hist_pt630, 32.6/sumOfWeights());
+      scale(_hist_pt1800, 38.5/sumOfWeights());
+    }
+   
+    //@}
+
+  private:
+    
+    /// @name Histos
+    //@{
+    AIDA::IHistogram1D* _hist_pt630;
+    AIDA::IHistogram1D* _hist_pt1800;
+    //@}
+
+  };
+ 
+  
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_1988_S1865951> plugin_CDF_1988_S1865951;
+
+}

Copied: trunk/src/Analyses/CDF_1990_S2089246.cc (from r1802, trunk/src/Analyses/CDF/CDF_1990_S2089246.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_1990_S2089246.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_1990_S2089246.cc)
@@ -0,0 +1,118 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+
+namespace Rivet {
+
+
+  /** @brief CDF pseudorapidity analysis
+   * @author Andy Buckley
+   */
+  class CDF_1990_S2089246 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: set pbar-p beams and book the projections for the
+    /// trigger emulation and the |eta| < 3.5 charged final state.
+    CDF_1990_S2089246()
+      : Analysis("CDF_1990_S2089246")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      addProjection(ChargedFinalState(-3.5, 3.5), "FS");
+      addProjection(ChargedFinalState(-5.9, 5.9), "CFSAll");
+      addProjection(Beam(), "Beam");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the pseudorapidity distributions for the two beam energies.
+    void init() {
+      _hist_eta1800 = bookHistogram1D(3, 1, 1);
+      _hist_eta630 = bookHistogram1D(4, 1, 1);
+    }
+
+
+    /// Emulate the min-bias trigger/selection, then fill the |eta| spectra.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
+      
+      // BBC-counter hit counts (min-bias trigger) and VTPC track counts
+      // (event selection) in the backward/forward hemispheres.
+      int nTrigBwd = 0, nTrigFwd = 0;
+      int nTrkBwd = 0, nTrkFwd = 0;
+      const ChargedFinalState& trigCfs = applyProjection<ChargedFinalState>(event, "CFSAll");
+      foreach (const Particle& p, trigCfs.particles()) {
+        const double eta = p.momentum().pseudorapidity();
+        if (inRange(eta, -5.9, -3.2)) ++nTrigBwd;
+        else if (inRange(eta, 3.2, 5.9)) ++nTrigFwd;
+        
+        if (inRange(eta, -3.0, 0.0)) ++nTrkBwd;
+        else if (inRange(eta, 0.0, 3.0)) ++nTrkFwd;
+      }
+      
+      // Require at least one coincidence hit in both BBC counters
+      if (nTrigBwd == 0 || nTrigFwd == 0) vetoEvent;
+      getLog() << Log::DEBUG << "Trigger 1: " << nTrigBwd << " Trigger 2: " << nTrigFwd << endl;
+      
+      // Require >= 4 VTPC tracks, with activity in both hemispheres
+      if (nTrkBwd + nTrkFwd < 4 || nTrkBwd == 0 || nTrkFwd == 0) vetoEvent;
+      getLog() << Log::DEBUG << " Num. forward: " << nTrkFwd  << ", Num. backward: " << nTrkBwd << endl;
+      
+      // Fill the |eta| histogram matching the beam energy
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      foreach (const Particle& p, fs.particles()) {
+        const double abseta = fabs(p.momentum().pseudorapidity());
+        if (fuzzyEquals(sqrtS/GeV, 630)) {
+          _hist_eta630->fill(abseta, weight);
+        } else if (fuzzyEquals(sqrtS/GeV, 1800)) {
+          _hist_eta1800->fill(abseta, weight);
+        }
+      }
+    }
+    
+    
+    
+    /// Finalize: divide through by num events to get d<N>/d(eta) in bins.
+    void finalize() {
+      scale(_hist_eta630, 1/sumOfWeights());
+      scale(_hist_eta1800, 1/sumOfWeights());
+    }
+   
+    //@}
+
+
+  private:
+
+    /// @name Histogram collections
+    //@{
+    AIDA::IHistogram1D* _hist_eta630;
+    AIDA::IHistogram1D* _hist_eta1800;
+    //@}
+
+  };
+ 
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_1990_S2089246> plugin_CDF_1990_S2089246;
+
+}

Copied: trunk/src/Analyses/CDF_1994_S2952106.cc (from r1802, trunk/src/Analyses/CDF/CDF_1994_S2952106.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_1994_S2952106.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_1994_S2952106.cc)
@@ -0,0 +1,231 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/PVertex.hh"
+
+namespace Rivet {
+
+
+  /** @brief CDF Run I color coherence analysis
+   * @author Lars Sonnenschein
+   */
+  class CDF_1994_S2952106 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: set beams, analysis cut values, and the projections
+    /// for jets, missing ET and the primary vertex.
+    CDF_1994_S2952106()
+      : Analysis("CDF_1994_S2952106"), 
+        _pvzmax(600*mm), _leadJetPt(100*GeV), _3rdJetPt(10*GeV),
+        _etamax(0.7), _phimin(PI/18.0), _metsetmax(6.0*GeV)
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      const FinalState fs(-4.2, 4.2);
+      addProjection(fs, "FS");
+      addProjection(FastJets(fs, FastJets::CDFJETCLU, 0.7), "ConeJets");
+      addProjection(TotalVisibleMomentum(fs), "CalMET");
+      addProjection(PVertex(), "PV");
+      
+      // Veto (anti)neutrinos, and muons with pT above 1.0 GeV
+      VetoedFinalState vfs(fs);
+      vfs.vetoNeutrinos();
+      vfs.addVetoDetail(MUON, 1.0*GeV, MAXDOUBLE);
+      addProjection(vfs, "VFS");
+      
+      _events3jPassed = 0.0;
+    }
+
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book all histograms.
+    void init() {
+      /// @todo Use histogram auto-booking
+      
+      //const string hname = "HvsDphi";
+      //const string htitle = "H vs Delta phi";
+      //_histHvsDphi = bookHistogram2D(hname, htitle, 40, -4., 4., 32, 0., 3.2);
+      
+      //const string hname2 = "RvsAlpha";
+      //const string htitle2 = "R vs alpha";
+      //_histRvsAlpha = bookHistogram2D(hname2, htitle2, 50, 0., 5., 32, -1.6, 1.6);
+      
+      _histJet1Et  = bookHistogram1D("Jet1Et", 40, 0., 500.);
+      _histJet2Et  = bookHistogram1D("Jet2Et", 40, 0., 500.);
+      _histR23     = bookHistogram1D("R23", 50, 0., 5.);
+      _histJet3eta = bookHistogram1D("Jet3eta", 42, -4., 4.);
+      
+      /// @todo Need better title
+      _histAlpha = bookHistogram1D("alpha", 42, -PI/2., PI/2.);
+      
+      //const string hname8 = "alphaMCvsDat";
+      //const string htitle8 = "alpha MC vs. Data ";
+      //_histAlphaMCvsDat = bookHistogram1D(hname8, htitle8, 42, -PI/2., PI/2.);
+      
+      /// @todo Need better title
+      _histAlpaIdeal = bookHistogram1D("alphaIdeal", 42, -PI/2., PI/2.);
+      
+      /// @todo Need better title
+      _histAlpaCDF = bookHistogram1D("alphaCDF", 42, -PI/2., PI/2.);
+      
+      /// @todo Need better title
+      _histR23Ideal = bookHistogram1D("R23Ideal", 50, 0., 5.);
+      
+      /// @todo Need better title
+      _histR23CDF = bookHistogram1D("R23CDF", 50, 0., 5.);
+      
+      /// @todo Need better title
+      _histJet3etaIdeal = bookHistogram1D("Jet3etaIdeal", 42, -4., 4.);
+      
+      /// @todo Need better title
+      _histJet3etaCDF = bookHistogram1D("Jet3etaCDF", 42, -4., 4.);
+    }
+    
+    
+    
+    /// Apply the event selection and fill the jet/alpha histograms.
+    void analyze(const Event & event) {
+      const Jets jets = applyProjection<FastJets>(event, "ConeJets").jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity before any cuts = " << jets.size() << endl;
+      
+      // Find vertex and check  that its z-component is < 60 cm from the nominal IP
+      // Compare the dimensionful z-position directly to _pvzmax (600*mm): the
+      // previous form divided by mm on the left while keeping *mm on the right,
+      // double-counting the unit factor.
+      const PVertex& pv = applyProjection<PVertex>(event, "PV");
+      if (fabs(pv.position().z()) > _pvzmax) {
+        vetoEvent;
+      }
+      
+      // Check there isn't too much missing Et
+      // MET significance: MET / sqrt(scalar ET), both expressed in GeV.
+      // NOTE(review): _metsetmax carries a *GeV factor although the LHS is in
+      // sqrt(GeV) units - harmless for GeV == 1, but confirm the intent.
+      const TotalVisibleMomentum& caloMissEt = applyProjection<TotalVisibleMomentum>(event, "CalMET");
+      getLog() << Log::DEBUG << "Missing pT = " << caloMissEt.momentum().pT()/GeV << " GeV" << endl;      
+      if ((caloMissEt.momentum().pT()/GeV) / sqrt(caloMissEt.scalarET()/GeV) > _metsetmax) {
+        vetoEvent;
+      }
+      
+      // Check jet requirements
+      if (jets.size() < 3) vetoEvent;
+      // Use the configured cut member rather than a duplicated hard-coded 100*GeV
+      if (jets[0].momentum().pT() < _leadJetPt) vetoEvent;
+      
+      // More jet 1,2,3 checks
+      FourMomentum pj1(jets[0].momentum()), pj2(jets[1].momentum()), pj3(jets[2].momentum());
+      if (fabs(pj1.eta()) > _etamax || fabs(pj2.eta()) > _etamax) vetoEvent;
+      getLog() << Log::DEBUG << "Jet 1 & 2 eta, pT requirements fulfilled" << endl;          
+      
+      // NOTE(review): this keeps only events with Delta(phi) <= ~10 deg between
+      // the two leading jets, whereas the _phimin documentation suggests a
+      // back-to-back requirement - confirm the intended sense of the cut.
+      if (deltaPhi(pj1.phi(), pj2.phi()) > _phimin) vetoEvent;
+      getLog() << Log::DEBUG << "Jet 1 & 2 phi requirement fulfilled" << endl;
+      
+      const double weight = event.weight();
+      _histJet1Et->fill(pj1.pT(), weight);
+      _histJet2Et->fill(pj2.pT(), weight);
+      _histR23->fill(deltaR(pj2, pj3), weight);
+      _histJet3eta->fill(pj3.eta(), weight);
+      
+      // Next cut only required for alpha studies
+      if (pj3.pT() < _3rdJetPt) vetoEvent;
+      getLog() << Log::DEBUG << "3rd jet passes alpha histo pT cut" << endl;      
+      _events3jPassed += weight;
+      
+      // Calc and plot alpha
+      const double dPhi = deltaPhi(pj3.phi(), pj2.phi());    
+      const double dH = sign(pj2.eta()) * (pj3.eta() - pj2.eta());
+      const double alpha = atan(dH/dPhi);
+      _histAlpha->fill(alpha, weight);
+    }
+    
+    
+    /// Finalize: normalise all filled histograms to unit area.
+    void finalize() { 
+      /// @todo Apply correction
+      // double a, b, c, erra, errb, errc;
+      // for (int ibin = 0;  ibin < _histAlpha->getNbins(); ++ibin) {
+      // a = _histAlpha->GetBinContent(ibin);
+      // erra = _histAlpha->GetBinError(ibin);
+      // b = _histAlpaIdeal->GetBinContent(ibin);
+      // errb = _histAlpaIdeal->GetBinError(ibin);
+      // c = _histAlpaCDF->GetBinContent(ibin);
+      // errc = _histAlpaCDF->GetBinError(ibin);
+      // _histAlpha->SetBinContent(ibin, b/c);
+      // _histAlpha->SetBinError(ibin, sqrt(sqr(b)/sqr(c)*sqr(erra) + sqr(a)/sqr(c)*sqr(errb) + 
+      // sqr(a*b/(sqr(c)))*sqr(errc) ) );
+      // }
+      /// @todo Same correction to be applied for _hisR23 and _histJet3eta histograms
+      
+      getLog() << Log::INFO << "Cross-section = " << crossSection()/picobarn << " pb" << endl;
+      normalize(_histJet1Et);
+      normalize(_histJet2Et);
+      normalize(_histR23);
+      normalize(_histJet3eta);
+      normalize(_histAlpha);
+    }
+ 
+    //@}
+
+  private:
+
+    /// Counter for the number of events analysed
+    /// NOTE(review): never updated in the visible code - possibly vestigial.
+    double _eventsTried;
+
+    /// Counter for the number of  3jet events passed
+    double _events3jPassed;
+
+
+    /// @name Analysis cuts
+    //@{
+    ///Cut on primary vertex z-position (z(PV) < 60 cm)
+    const double _pvzmax;
+
+    /// Min \f$ p_T \f$ of the leading and 3rd leading jets.
+    //@{
+    const double _leadJetPt;
+    const double _3rdJetPt;
+    //@}
+
+    /// Max pseudorapidity range of 2nd and 3rd leading jets.
+    const double _etamax;
+
+    /// Delta phi (azimuthal angle) requirement (transverse back to back'ness).
+    const double _phimin;
+
+    /// MET over sqrt(scalar \f$ E_T \f$) cut requirement.
+    const double _metsetmax;
+    //@}
+
+
+  private:
+
+    /// @name Histogram collections
+    //@{
+    // AIDA::IHistogram2D* _histHvsDphi;
+    // AIDA::IHistogram2D* _histRvsAlpha;
+    AIDA::IHistogram1D* _histJet1Et;
+    AIDA::IHistogram1D* _histJet2Et;
+    AIDA::IHistogram1D* _histR23;
+    AIDA::IHistogram1D* _histJet3eta;
+    AIDA::IHistogram1D* _histAlpha;
+    // AIDA::IHistogram1D* _histAlphaMCvsDat;
+    AIDA::IHistogram1D* _histAlpaIdeal;
+    AIDA::IHistogram1D* _histAlpaCDF;
+    AIDA::IHistogram1D* _histR23Ideal;
+    AIDA::IHistogram1D* _histR23CDF;
+    AIDA::IHistogram1D* _histJet3etaIdeal;
+    AIDA::IHistogram1D* _histJet3etaCDF;
+    //@}
+
+  };
+   
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_1994_S2952106> plugin_CDF_1994_S2952106;
+
+}

Copied: trunk/src/Analyses/CDF_2000_S4155203.cc (from r1802, trunk/src/Analyses/CDF/CDF_2000_S4155203.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2000_S4155203.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2000_S4155203.cc)
@@ -0,0 +1,80 @@
+// -*- C++ -*-
+// CDF Z pT analysis
+
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ZFinder.hh"
+
+namespace Rivet {
+
+
+  /*
+   * @brief CDF Run I Z pT in Drell-Yan events
+   * @author Hendrik Hoeth
+   */ 
+  class CDF_2000_S4155203 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: build Z -> e+ e- candidates from the full final state,
+    /// requiring 66 GeV < m(ee) < 116 GeV, with photon clustering dR = 0.2.
+    CDF_2000_S4155203() 
+      : Analysis("CDF_2000_S4155203")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      ZFinder zfinder(FinalState(), ELECTRON, 66.0*GeV, 116.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book the Z pT histogram.
+    void init() {
+      _hist_zpt = bookHistogram1D(1, 1, 1);
+    }
+    
+    
+    /// Fill the Z pT spectrum from the unique e+e- candidate.
+    void analyze(const Event& e) {
+      const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
+      const bool uniqueZ = (zfinder.particles().size() == 1);
+      if (!uniqueZ) {
+        getLog() << Log::DEBUG << "No unique e+e- pair found" << endl;
+        vetoEvent;
+      }
+      
+      const FourMomentum zmom = zfinder.particles()[0].momentum();
+      getLog() << Log::DEBUG << "Dilepton mass = " << zmom.mass()/GeV << " GeV"  << endl;
+      getLog() << Log::DEBUG << "Dilepton pT   = " << zmom.pT()/GeV << " GeV" << endl;
+      _hist_zpt->fill(zmom.pT()/GeV, e.weight());
+    }
+    
+    
+    /// Normalize to the experimental cross-section.
+    void finalize() {
+      /// @todo Get norm from generator cross-section
+      normalize(_hist_zpt, 247.4);
+    }
+    
+    //@}
+
+
+  private:
+
+    // Histogram of the dilepton transverse momentum
+    AIDA::IHistogram1D *_hist_zpt;
+
+  };
+
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2000_S4155203> plugin_CDF_2000_S4155203;
+
+}

Copied: trunk/src/Analyses/CDF_2001_S4751469.cc (from r1802, trunk/src/Analyses/CDF/CDF_2001_S4751469.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2001_S4751469.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2001_S4751469.cc)
@@ -0,0 +1,233 @@
+// -*- C++ -*-
+
+// Field & Stuart underlying event analysis at CDF.
+// Phys.Rev.D65:092002,2002 - no arXiv code.
+// FNAL-PUB 01/211-E
+
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/LossyFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /* @brief "Field-Stuart" CDF Run I underlying event analysis
+   * @author Andy Buckley
+   * 
+   * 
+   * @par Run conditions
+   * 
+   * @arg \f$ \sqrt{s} = \f$ 1800 GeV
+   * @arg Run with generic QCD events.
+   * @arg Several \f$ p_\perp^\text{min} \f$ cutoffs are probably required to fill the profile histograms:
+   *   @arg \f$ p_\perp^\text{min} = \f$ 0 (min bias), 10, 20 GeV
+   * 
+   */ 
+  class CDF_2001_S4751469 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: cuts on final state are \f$ -1 < \eta < 1 \f$ 
+    /// and \f$ p_T > 0.5 \f$ GeV. Use a lossy charged FS projection, which
+    /// randomly discards 8% of charged particles, as a kind of hacky detector 
+    /// correction.
+    CDF_2001_S4751469()
+      : Analysis("CDF_2001_S4751469"),
+        _totalNumTrans2(0), _totalNumTrans5(0), _totalNumTrans30(0),
+        _sumWeightsPtLead2(0),_sumWeightsPtLead5(0), _sumWeightsPtLead30(0)
+    {
+      setBeams(PROTON, ANTIPROTON);
+      const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
+      const LossyFinalState lfs(cfs, 0.08);
+      addProjection(lfs, "FS");
+      // Track jets (R = 0.7) built from the lossy charged final state
+      addProjection(FastJets(lfs, FastJets::TRACKJET, 0.7), "TrackJet");
+    }
+    
+    //@}
+    
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms: multiplicity and pT-sum profiles vs pT(lead) for the
+    /// min-bias (tables 3, 5) and JET20 (tables 4, 6) samples, plus the
+    /// transverse-region pT distributions for three pT(lead) thresholds.
+    void init() {
+      _numTowardMB = bookProfile1D(3, 1, 1);
+      _numTransMB = bookProfile1D(3, 1, 2);
+      _numAwayMB = bookProfile1D(3, 1, 3);
+      _numTowardJ20 = bookProfile1D(4, 1, 1);
+      _numTransJ20 = bookProfile1D(4, 1, 2);
+      _numAwayJ20 = bookProfile1D(4, 1, 3);
+      
+      _ptsumTowardMB = bookProfile1D(5, 1, 1);
+      _ptsumTransMB = bookProfile1D(5, 1, 2);
+      _ptsumAwayMB = bookProfile1D(5, 1, 3);
+      _ptsumTowardJ20 = bookProfile1D(6, 1, 1);
+      _ptsumTransJ20 = bookProfile1D(6, 1, 2);
+      _ptsumAwayJ20 = bookProfile1D(6, 1, 3);
+      
+      _ptTrans2 = bookHistogram1D(7, 1, 1);
+      _ptTrans5 = bookHistogram1D(7, 1, 2);
+      _ptTrans30 = bookHistogram1D(7, 1, 3);
+    }
+    
+
+    /// Do the analysis
+    void analyze(const Event& event) {
+      
+      // Analyse, with pT > 0.5 GeV AND |eta| < 1
+      const JetAlg& tj = applyProjection<JetAlg>(event, "TrackJet");
+      
+      // Get jets, sorted by pT
+      const Jets jets = tj.jetsByPt();
+      if (jets.empty()) { 
+        vetoEvent; 
+      }
+
+      // The leading track jet defines the event orientation (phiLead) and
+      // the hardness scale (ptLead = scalar pT sum of its constituents).
+      Jet leadingJet = jets.front();
+      const double phiLead = leadingJet.ptWeightedPhi();
+      const double ptLead = leadingJet.ptSum();
+      
+      // Cut on highest pT jet: combined 0.5 GeV < pT(lead) < 50 GeV
+      if (ptLead/GeV < 0.5) vetoEvent;
+      if (ptLead/GeV > 50.0) vetoEvent;
+
+      // Get the event weight
+      const double weight = event.weight();
+
+      // Count sum of all event weights in three pTlead regions
+      // (denominators for the normalisation in finalize())
+      if (ptLead/GeV > 2.0) {
+        _sumWeightsPtLead2 += weight;
+      }
+      if (ptLead/GeV > 5.0) { 
+        _sumWeightsPtLead5 += weight;
+      }
+      if (ptLead/GeV > 30.0) {
+        _sumWeightsPtLead30 += weight;
+      }
+      
+      // Run over tracks, classifying each by its azimuthal separation from
+      // the leading jet: toward (< 60 deg), transverse (60-120 deg), away (> 120 deg).
+      double ptSumToward(0.0), ptSumAway(0.0), ptSumTrans(0.0);
+      size_t numToward(0), numTrans(0), numAway(0);
+      foreach (const Jet& j, jets) {
+        foreach (const FourMomentum& p, j) {
+          // Calculate Delta(phi) from leading jet
+          const double dPhi = deltaPhi(p.azimuthalAngle(), phiLead);
+          
+          // Get pT sum and multiplicity values for each region 
+          // (each is 1 number for each region per event)
+          /// @todo Include event weight factor?
+          if (dPhi < PI/3.0) {
+            ptSumToward += p.pT();
+            ++numToward;
+            
+          } else if (dPhi < 2*PI/3.0) {
+            ptSumTrans += p.pT();
+            ++numTrans;
+            // Fill transverse pT distributions
+            if (ptLead/GeV > 2.0) {
+              _ptTrans2->fill(p.pT()/GeV, weight);
+              _totalNumTrans2 += weight;
+            }
+            if (ptLead/GeV > 5.0) { 
+              _ptTrans5->fill(p.pT()/GeV, weight);
+              _totalNumTrans5 += weight;
+            }
+            if (ptLead/GeV > 30.0) {
+              _ptTrans30->fill(p.pT()/GeV, weight);
+              _totalNumTrans30 += weight;
+            }
+            
+          } else {
+            ptSumAway += p.pT();
+            ++numAway;
+          }
+          
+        }
+      }
+      
+      // Log some event details
+      getLog() << Log::DEBUG 
+               << "pT [lead; twd, away, trans] = ["
+               << ptLead << "; " 
+               << ptSumToward << ", " 
+               << ptSumAway << ", " 
+               << ptSumTrans << "]" 
+               << endl;
+      
+      // Update the pT profile histograms
+      // (same values go into the min-bias and JET20 binnings)
+      _ptsumTowardMB->fill(ptLead/GeV, ptSumToward/GeV, weight);
+      _ptsumTowardJ20->fill(ptLead/GeV, ptSumToward/GeV, weight);
+      
+      _ptsumTransMB->fill(ptLead/GeV, ptSumTrans/GeV, weight);
+      _ptsumTransJ20->fill(ptLead/GeV, ptSumTrans/GeV, weight);
+      
+      _ptsumAwayMB->fill(ptLead/GeV, ptSumAway/GeV, weight);
+      _ptsumAwayJ20->fill(ptLead/GeV, ptSumAway/GeV, weight);
+      
+      // Log some event details
+      getLog() << Log::DEBUG 
+               << "N [twd, away, trans] = ["
+               << numToward << ", " 
+               << numTrans << ", " 
+               << numAway << "]" 
+               << endl;
+      
+      // Update the N_jet profile histograms
+      _numTowardMB->fill(ptLead/GeV, numToward, weight);
+      _numTowardJ20->fill(ptLead/GeV, numToward, weight);
+      
+      _numTransMB->fill(ptLead/GeV, numTrans, weight);
+      _numTransJ20->fill(ptLead/GeV, numTrans, weight);
+      
+      _numAwayMB->fill(ptLead/GeV, numAway, weight);
+      _numAwayJ20->fill(ptLead/GeV, numAway, weight);
+    }
+    
+    
+    /// Normalize histos
+    // NOTE(review): divides by the per-threshold weight sums; these are zero
+    // if no event passed a given pT(lead) threshold - confirm acceptable.
+    void finalize() {
+      normalize(_ptTrans2, _totalNumTrans2 / _sumWeightsPtLead2);
+      normalize(_ptTrans5, _totalNumTrans5 / _sumWeightsPtLead5);
+      normalize(_ptTrans30, _totalNumTrans30 / _sumWeightsPtLead30);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// Sum total number of charged particles in the trans region, in 3 \f$ p_\perp^\text{lead} \f$ bins.
+    double _totalNumTrans2, _totalNumTrans5, _totalNumTrans30;
+
+    /// Sum the total number of events in 3 \f$ p_\perp^\text{lead} \f$ bins.
+    double _sumWeightsPtLead2,_sumWeightsPtLead5, _sumWeightsPtLead30;
+
+
+    /// @name Histogram collections
+    //@{
+    /// Profile histograms, binned in the \f$ p_T \f$ of the leading jet, for
+    /// the \f$ p_T \f$ sum in the toward, transverse and away regions.
+    AIDA::IProfile1D *_ptsumTowardMB,  *_ptsumTransMB,  *_ptsumAwayMB;
+    AIDA::IProfile1D *_ptsumTowardJ20, *_ptsumTransJ20, *_ptsumAwayJ20;
+
+    /// Profile histograms, binned in the \f$ p_T \f$ of the leading jet, for
+    /// the number of charged particles per jet in the toward, transverse and
+    /// away regions.
+    AIDA::IProfile1D *_numTowardMB,  *_numTransMB,  *_numAwayMB;
+    AIDA::IProfile1D *_numTowardJ20, *_numTransJ20, *_numAwayJ20;
+
+    /// Histogram of \f$ p_T \f$ distribution for 3 different \f$ p_{T1} \f$ IR cutoffs.
+    AIDA::IHistogram1D *_ptTrans2, *_ptTrans5, *_ptTrans30;
+    //@}
+
+  };
+
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2001_S4751469> plugin_CDF_2001_S4751469;
+
+}

Copied: trunk/src/Analyses/CDF_2002_S4796047.cc (from r1802, trunk/src/Analyses/CDF/CDF_2002_S4796047.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2002_S4796047.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2002_S4796047.cc)
@@ -0,0 +1,115 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+  /**
+   * @brief CDF Run I charged multiplicity measurement
+   * @author Hendrik Hoeth
+   * 
+   * This analysis measures the charged multiplicity distribution
+   * in minimum bias events at two different center-of-mass energies:
+   * \f$ \sqrt{s} = \f$ 630 and 1800 GeV.
+   * 
+   * Particles with c*tau > 10 mm are considered stable, i.e. they
+   * are reconstructed and their decay products removed. Selection
+   * cuts are |eta|<1 and pT>0.4 GeV.
+   * 
+   * 
+   * @par Run conditions
+   * 
+   * @arg Two different beam energies: \f$ \sqrt{s} = \f$ 630 and 1800 GeV
+   * @arg Run with generic QCD events.
+   * @arg Set particles with c*tau > 10 mm stable
+   * 
+   */
+  class CDF_2002_S4796047 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: cuts on final state are \f$ -1 < \eta < 1 \f$ 
+    /// and \f$ p_T > 0.4 \f$ GeV.
+    CDF_2002_S4796047()
+      : Analysis("CDF_2002_S4796047")
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      addProjection(Beam(), "Beam");
+      // Charged tracks with |eta| < 1 and pT > 0.4 GeV
+      const ChargedFinalState cfs(-1.0, 1.0, 0.4*GeV);
+      addProjection(cfs, "FS");
+    }
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms: one multiplicity distribution and one <pT> vs.
+    /// multiplicity profile for each of sqrt(s) = 630 and 1800 GeV.
+    void init() {
+      _hist_multiplicity_630  = bookHistogram1D(1, 1, 1);
+      _hist_multiplicity_1800 = bookHistogram1D(2, 1, 1);
+      _hist_pt_vs_multiplicity_630  = bookProfile1D(3, 1, 1);
+      _hist_pt_vs_multiplicity_1800 = bookProfile1D(4, 1, 1);
+    }
+    
+    
+    /// Do the analysis
+    void analyze(const Event& e) {
+      const double sqrtS = applyProjection<Beam>(e, "Beam").sqrtS();
+      const ChargedFinalState& fs = applyProjection<ChargedFinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+
+      // Get the event weight
+      const double weight = e.weight();
+
+      // Fill histos of charged multiplicity distributions
+      // NOTE(review): other analyses in this commit compare "sqrtS/GeV"
+      // against a plain number; "630/GeV" here only agrees if GeV == 1
+      // numerically -- confirm and consider making the units style consistent.
+      if (fuzzyEquals(sqrtS, 630/GeV)) {
+        _hist_multiplicity_630->fill(numParticles, weight);
+      } 
+      else if (fuzzyEquals(sqrtS, 1800/GeV)) {
+        _hist_multiplicity_1800->fill(numParticles, weight);
+      }
+
+      // Fill histos for <pT> vs. charged multiplicity
+      foreach (const Particle& p, fs.particles()) {
+        const double pT = p.momentum().pT();
+        if (fuzzyEquals(sqrtS, 630/GeV)) {
+          _hist_pt_vs_multiplicity_630->fill(numParticles, pT/GeV, weight);
+        }
+        else if (fuzzyEquals(sqrtS, 1800/GeV)) {
+          _hist_pt_vs_multiplicity_1800->fill(numParticles, pT/GeV, weight);
+        }
+      }
+      
+    }
+    
+
+    /// Normalize the multiplicity distributions to fixed areas.
+    // NOTE(review): the hard-coded constants are presumably the measured
+    // cross-sections/areas from the paper -- confirm against HepData.
+    void finalize() {
+      /// @todo Get cross-section from the generator
+      normalize(_hist_multiplicity_630, 3.21167);
+      normalize(_hist_multiplicity_1800, 4.19121);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    /// Charged multiplicity distributions at sqrt(s) = 630 and 1800 GeV.
+    AIDA::IHistogram1D *_hist_multiplicity_630;
+    AIDA::IHistogram1D *_hist_multiplicity_1800;
+    /// Mean track pT vs. charged multiplicity at sqrt(s) = 630 and 1800 GeV.
+    AIDA::IProfile1D   *_hist_pt_vs_multiplicity_630 ;
+    AIDA::IProfile1D   *_hist_pt_vs_multiplicity_1800;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2002_S4796047> plugin_CDF_2002_S4796047;
+
+}

Copied: trunk/src/Analyses/CDF_2004_S5839831.cc (from r1802, trunk/src/Analyses/CDF/CDF_2004_S5839831.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2004_S5839831.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2004_S5839831.cc)
@@ -0,0 +1,376 @@
+// -*- C++ -*-
+// "Acosta" underlying event analysis at CDF, inc. "Swiss Cheese"
+
+#include "Rivet/Analysis.hh"
+#include "Rivet/Jet.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /**
+   * @brief "Acosta" CDF underlying event analysis
+   * @author Andy Buckley
+   */
+  class CDF_2004_S5839831 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: cuts on charged final state are \f$ -1 < \eta < 1 \f$ 
+    /// and \f$ p_T > 0.4 \f$ GeV.
+    CDF_2004_S5839831() 
+      : Analysis("CDF_2004_S5839831") 
+    {
+      setBeams(PROTON, ANTIPROTON);
+      addProjection(Beam(), "Beam");
+      // Calorimeter-like final state (|eta| < 1.2) used for jet finding
+      const FinalState calofs(-1.2, 1.2);
+      addProjection(calofs, "CaloFS");
+      addProjection(FastJets(calofs, FastJets::CDFJETCLU, 0.7), "Jets");
+      // Charged tracks for the transverse-cone max/min part
+      const ChargedFinalState trackfs(-1.2, 1.2, 0.4*GeV);
+      addProjection(trackfs, "TrackFS");
+      // Restrict tracks to |eta| < 0.7 for the min bias part.
+      const ChargedFinalState mbfs(-0.7, 0.7, 0.4*GeV);
+      addProjection(mbfs, "MBFS");
+      // Restrict tracks to |eta| < 1 for the Swiss-Cheese part.
+      const ChargedFinalState cheesefs(-1.0, 1.0, 0.4*GeV);
+      addProjection(cheesefs, "CheeseFS");
+      // NOTE(review): the "CheeseJets" projection booked here is never
+      // applied in analyze(), which uses "Jets" instead -- confirm whether
+      // the Swiss-Cheese jets were meant to come from this projection.
+      addProjection(FastJets(cheesefs, FastJets::CDFJETCLU, 0.7), "CheeseJets");
+    }
+    
+    //@}
+
+
+  private:
+
+    /// Per-event results for the two R = 0.7 transverse cones: track
+    /// multiplicities and scalar pT sums of the "max" and "min" cones,
+    /// plus the pT difference between them.
+    struct ConesInfo {
+      ConesInfo() : numMax(0), numMin(0), ptMax(0), ptMin(0), ptDiff(0) {}
+      unsigned int numMax, numMin;
+      double ptMax, ptMin, ptDiff;
+    };
+    
+    
+    /// Sum track pT and multiplicity in the two R = 0.7 cones at the leading
+    /// jet's eta, +-90 degrees away in phi, and classify them as max/min by
+    /// their pT sums.
+    ConesInfo _calcTransCones(const double etaLead, const double phiLead, 
+                              const ParticleVector& tracks) {
+      const double phiTransPlus = mapAngle0To2Pi(phiLead + PI/2.0);
+      const double phiTransMinus = mapAngle0To2Pi(phiLead - PI/2.0);
+      getLog() << Log::DEBUG << "phi_lead = " << phiLead 
+               << " -> trans = (" << phiTransPlus 
+               << ", " << phiTransMinus << ")" << endl;
+      
+      unsigned int numPlus(0), numMinus(0);
+      double ptPlus(0), ptMinus(0);
+      // Run over all charged tracks
+      foreach (const Particle& t, tracks) {
+        FourMomentum trackMom = t.momentum();
+        const double pt = trackMom.pT();
+        
+        // Find if track mom is in either transverse cone
+        if (deltaR(trackMom, etaLead, phiTransPlus) < 0.7) {
+          ptPlus += pt;
+          numPlus += 1;
+        } else if (deltaR(trackMom, etaLead, phiTransMinus) < 0.7) {
+          ptMinus += pt;
+          numMinus += 1;
+        }
+      }
+      
+      ConesInfo rtn;
+      // Assign N_{min,max} from N_{plus,minus}
+      rtn.numMax = (ptPlus >= ptMinus) ? numPlus : numMinus;
+      rtn.numMin = (ptPlus >= ptMinus) ? numMinus : numPlus;
+      // Assign pT_{min,max} from pT_{plus,minus}
+      rtn.ptMax = (ptPlus >= ptMinus) ? ptPlus : ptMinus;
+      rtn.ptMin = (ptPlus >= ptMinus) ? ptMinus : ptPlus;
+      rtn.ptDiff = fabs(rtn.ptMax - rtn.ptMin);
+      
+      getLog() << Log::DEBUG << "Min cone has " << rtn.numMin << " tracks -> " 
+               << "pT_min = " << rtn.ptMin/GeV << " GeV" << endl;
+      getLog() << Log::DEBUG << "Max cone has " << rtn.numMax << " tracks -> " 
+               << "pT_max = " << rtn.ptMax/GeV << " GeV" << endl;
+      
+      return rtn;
+    }
+    
+    
+    /// Convenience overload taking the leading jet's four-momentum directly.
+    ConesInfo _calcTransCones(const FourMomentum& leadvec, 
+                              const ParticleVector& tracks) {
+      const double etaLead = leadvec.pseudorapidity();
+      const double phiLead = leadvec.azimuthalAngle();
+      return _calcTransCones(etaLead, phiLead, tracks);
+    }
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      _pt90MaxAvg1800 = bookProfile1D(1, 1, 1);
+      _pt90MinAvg1800 = bookProfile1D(1, 1, 2); 
+      _pt90Max1800 = bookProfile1D(2, 1, 1);
+      _pt90Min1800 = bookProfile1D(2, 1, 2);
+      _pt90Diff1800 = bookProfile1D(2, 1, 3);
+      _num90Max1800 = bookProfile1D(4, 1, 1);
+      _num90Min1800 = bookProfile1D(4, 1, 2);
+      _pTSum1800_2Jet = bookProfile1D(7, 1, 1);
+      _pTSum1800_3Jet = bookProfile1D(7, 1, 2);
+      _pt90Max630 = bookProfile1D(8, 1, 1); 
+      _pt90Min630 = bookProfile1D(8, 1, 2); 
+      _pt90Diff630 = bookProfile1D(8, 1, 3); 
+      _pTSum630_2Jet = bookProfile1D(9, 1, 1);
+      _pTSum630_3Jet = bookProfile1D(9, 1, 2);       
+      
+      _pt90Dbn1800Et40 = bookHistogram1D(3, 1, 1);
+      _pt90Dbn1800Et80 = bookHistogram1D(3, 1, 2);
+      _pt90Dbn1800Et120 = bookHistogram1D(3, 1, 3);
+      _pt90Dbn1800Et160 = bookHistogram1D(3, 1, 4);
+      _pt90Dbn1800Et200 = bookHistogram1D(3, 1, 5);
+      _ptDbn1800MB = bookHistogram1D(6, 1, 1);
+      
+      _numTracksDbn1800MB = bookHistogram1D(5, 1, 1);
+      _numTracksDbn630MB = bookHistogram1D(10, 1, 1);
+      _ptDbn630MB = bookHistogram1D(11, 1, 1);
+    }
+    
+    
+    /// Do the analysis
+    void analyze(const Event& event) {
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
+      const double weight = event.weight();
+      
+      {
+        getLog() << Log::DEBUG << "Running max/min analysis" << endl;
+        vector<Jet> jets = applyProjection<JetAlg>(event, "Jets").jetsByE();
+        if (!jets.empty()) {
+          // Leading jet must be in central |eta| < 0.5 region
+          const Jet leadingjet = jets.front();
+          const double etaLead = leadingjet.momentum().eta();
+          // Get Et of the leading jet: used to bin histograms
+          const double ETlead = leadingjet.EtSum();
+          getLog() << Log::DEBUG << "Leading Et = " << ETlead/GeV << " GeV" << endl;
+          // NOTE(review): the comment above and the log message below imply
+          // the requirement is (|eta| < 0.5 AND ET > 15 GeV), but this veto
+          // only fires when BOTH cuts fail -- "&&" looks like it should be
+          // "||"; confirm against the paper's event selection.
+          if (fabs(etaLead) > 0.5 && ETlead < 15*GeV) {
+            getLog() << Log::DEBUG << "Leading jet eta = " << etaLead 
+                     << " not in |eta| < 0.5 & pT > 15 GeV" << endl;
+          } else {
+            // Multiplicity & pT distributions for sqrt(s) = 630 GeV, 1800 GeV
+            const ParticleVector tracks = applyProjection<FinalState>(event, "TrackFS").particles();
+            const ConesInfo cones = _calcTransCones(leadingjet.momentum(), tracks);
+            if (fuzzyEquals(sqrtS/GeV, 630)) {
+              _pt90Max630->fill(ETlead/GeV, cones.ptMax/GeV, weight);
+              _pt90Min630->fill(ETlead/GeV, cones.ptMin/GeV, weight);
+              _pt90Diff630->fill(ETlead/GeV, cones.ptDiff/GeV, weight);
+            } else if (fuzzyEquals(sqrtS/GeV, 1800)) {
+              _num90Max1800->fill(ETlead/GeV, cones.numMax, weight);
+              _num90Min1800->fill(ETlead/GeV, cones.numMin, weight);
+              _pt90Max1800->fill(ETlead/GeV, cones.ptMax/GeV, weight);
+              _pt90Min1800->fill(ETlead/GeV, cones.ptMin/GeV, weight);
+              _pt90Diff1800->fill(ETlead/GeV, cones.ptDiff/GeV, weight);
+              _pt90MaxAvg1800->fill(ETlead/GeV, cones.ptMax/GeV, weight); // /numMax
+              _pt90MinAvg1800->fill(ETlead/GeV, cones.ptMin/GeV, weight); // /numMin
+              //
+              // pT-sum distributions, binned in leading jet ET
+              const double ptTransTotal = cones.ptMax + cones.ptMin;
+              if (inRange(ETlead/GeV, 40, 80)) {
+                _pt90Dbn1800Et40->fill(ptTransTotal/GeV, weight);
+              } else if (inRange(ETlead/GeV, 80, 120)) {
+                _pt90Dbn1800Et80->fill(ptTransTotal/GeV, weight);
+              } else if (inRange(ETlead/GeV, 120, 160)) {
+                _pt90Dbn1800Et120->fill(ptTransTotal/GeV, weight);
+              } else if (inRange(ETlead/GeV, 160, 200)) {
+                _pt90Dbn1800Et160->fill(ptTransTotal/GeV, weight);
+              } else if (inRange(ETlead/GeV, 200, 270)) {
+                _pt90Dbn1800Et200->fill(ptTransTotal/GeV, weight);
+              }
+            }
+            
+          }
+        }
+      }
+      
+      
+      // Fill min bias total track multiplicity histos
+      {
+        getLog() << Log::DEBUG << "Running min bias multiplicity analysis" << endl;
+        const ParticleVector mbtracks = applyProjection<FinalState>(event, "MBFS").particles();
+        if (fuzzyEquals(sqrtS/GeV, 1800)) {
+          _numTracksDbn1800MB->fill(mbtracks.size(), weight);
+        } else if (fuzzyEquals(sqrtS/GeV, 630)) {
+          _numTracksDbn630MB->fill(mbtracks.size(), weight);
+        }
+        // Run over all charged tracks
+        foreach (const Particle& t, mbtracks) {
+          FourMomentum trackMom = t.momentum();
+          const double pt = trackMom.pT();
+          // Plot total pT distribution for min bias
+          if (fuzzyEquals(sqrtS/GeV, 1800)) {
+            _ptDbn1800MB->fill(pt/GeV, weight);
+          } else if (fuzzyEquals(sqrtS/GeV, 630)) {
+            _ptDbn630MB->fill(pt/GeV, weight);
+          }
+        }
+      }
+      
+      
+      
+      // Construct "Swiss Cheese" pT distributions, with pT contributions from
+      // tracks within R = 0.7 of the 1st, 2nd (and 3rd) jets being ignored. A
+      // different set of charged tracks, with |eta| < 1.0, is used here, and all
+      // the removed jets must have Et > 5 GeV.
+      {
+        getLog() << Log::DEBUG << "Running Swiss Cheese analysis" << endl;
+        const ParticleVector cheesetracks = applyProjection<FinalState>(event, "CheeseFS").particles();
+        // NOTE(review): this applies the "Jets" projection (calo FS) rather
+        // than the otherwise-unused "CheeseJets" projection built from the
+        // CheeseFS tracks -- confirm which jet collection is intended here.
+        vector<Jet> cheesejets = applyProjection<JetAlg>(event, "Jets").jetsByE();
+        if (cheesejets.empty()) {
+          getLog() << Log::DEBUG << "No 'cheese' jets found in event" << endl;
+          return;
+        }
+        // Require >= 2 jets with Et > 5 GeV, leading jet central (|eta| <= 0.5)
+        if (cheesejets.size() > 1 &&
+            fabs(cheesejets[0].momentum().pseudorapidity()) <= 0.5 &&
+            cheesejets[0].momentum().Et()/GeV > 5.0 &&
+            cheesejets[1].momentum().Et()/GeV > 5.0) {
+          
+          const double cheeseETlead = cheesejets[0].momentum().Et();
+          
+          const double eta1 = cheesejets[0].momentum().pseudorapidity();
+          const double phi1 = cheesejets[0].momentum().azimuthalAngle();
+          const double eta2 = cheesejets[1].momentum().pseudorapidity();
+          const double phi2 = cheesejets[1].momentum().azimuthalAngle();
+          
+          double ptSumSub2(0), ptSumSub3(0);
+          foreach (const Particle& t, cheesetracks) {
+            FourMomentum trackMom = t.momentum();
+            const double pt = trackMom.pT();
+            
+            // Subtracting 2 leading jets
+            const double deltaR1 = deltaR(trackMom, eta1, phi1);
+            const double deltaR2 = deltaR(trackMom, eta2, phi2);
+            getLog() << Log::TRACE << "Track vs jet(1): "
+                     << "|(" << trackMom.pseudorapidity() << ", " << trackMom.azimuthalAngle() << ") - "
+                     << "|(" << eta1 << ", " << phi1 << ")| = " << deltaR1 << endl;
+            getLog() << Log::TRACE << "Track vs jet(2): "
+                     << "|(" << trackMom.pseudorapidity() << ", " << trackMom.azimuthalAngle() << ") - "
+                     << "|(" << eta2 << ", " << phi2 << ")| = " << deltaR2 << endl;
+            if (deltaR1 > 0.7 && deltaR2 > 0.7) {
+              ptSumSub2 += pt;
+              
+              // Subtracting 3rd leading jet
+              if (cheesejets.size() > 2 && 
+                  cheesejets[2].momentum().Et()/GeV > 5.0) {
+                const double eta3 = cheesejets[2].momentum().pseudorapidity();
+                const double phi3 = cheesejets[2].momentum().azimuthalAngle();
+                const double deltaR3 = deltaR(trackMom, eta3, phi3);
+                getLog() << Log::TRACE << "Track vs jet(3): "
+                         << "|(" << trackMom.pseudorapidity() << ", " << trackMom.azimuthalAngle() << ") - "
+                         << "|(" << eta3 << ", " << phi3 << ")| = " << deltaR3 << endl;
+                if (deltaR3 > 0.7) {
+                  ptSumSub3 += pt;
+                }
+              }
+            }
+          }
+          
+          // Swiss Cheese sub 2,3 jets distributions for sqrt(s) = 630 GeV, 1800 GeV
+          if (fuzzyEquals(sqrtS/GeV, 630)) {
+            _pTSum630_2Jet->fill(cheeseETlead/GeV, ptSumSub2/GeV, weight);
+            _pTSum630_3Jet->fill(cheeseETlead/GeV, ptSumSub3/GeV, weight);
+          } else if (fuzzyEquals(sqrtS/GeV, 1800)) {
+            _pTSum1800_2Jet->fill(cheeseETlead/GeV, ptSumSub2/GeV, weight);
+            _pTSum1800_3Jet->fill(cheeseETlead/GeV, ptSumSub3/GeV, weight);
+          }
+          
+        }
+      }      
+      
+    }
+    
+    
+    /// Normalize the pT distribution histograms to fixed areas.
+    // NOTE(review): the hard-coded constants are presumably the entry counts
+    // from the reference data -- confirm against HepData (per the @todos).
+    void finalize() { 
+      // Normalize to actual number of entries in pT dbn histos
+      /// @todo Check this normalisation defn.
+      normalize(_pt90Dbn1800Et40,  1656.75);
+      /// @todo Check this normalisation defn.
+      normalize(_pt90Dbn1800Et80,  4657.5);
+      /// @todo Check this normalisation defn.
+      normalize(_pt90Dbn1800Et120, 5395.5);
+      /// @todo Check this normalisation defn.
+      normalize(_pt90Dbn1800Et160, 7248.75);
+      /// @todo Check this normalisation defn.
+      normalize(_pt90Dbn1800Et200, 2442.0);
+      
+      // and for min bias distributions:
+      /// @todo Check this normalisation defn.
+      normalize(_numTracksDbn1800MB, 309718.25);
+      /// @todo Check this normalisation defn.
+      normalize(_numTracksDbn630MB, 1101024.0);
+      /// @todo Check this normalisation defn.
+      normalize(_ptDbn1800MB, 33600.0);
+      /// @todo Check this normalisation defn.
+      normalize(_ptDbn630MB, 105088.0);
+    }
+
+    //@}
+
+  private:
+
+    /// @name Histogram collections
+    //@{
+    /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
+    /// the average \f$ p_T \f$ in the toward, transverse and away regions at 
+    /// \f$ \sqrt{s} = 1800 \text{GeV} \f$.
+    /// Corresponds to Table 1, and HepData table 1.
+    AIDA::IProfile1D *_pt90MaxAvg1800, *_pt90MinAvg1800;
+
+    /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
+    /// the \f$ p_T \f$ sum in the toward, transverse and away regions at 
+    /// \f$ \sqrt{s} = 1800 \text{GeV} \f$.
+    /// Corresponds to figure 2/3, and HepData table 2.
+    AIDA::IProfile1D *_pt90Max1800, *_pt90Min1800, *_pt90Diff1800;
+
+    /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
+    /// the \f$ p_T \f$ sum in the toward, transverse and away regions at
+    /// at \f$ \sqrt{s} = 630 \text{GeV} \f$.
+    /// Corresponds to figure 8, and HepData table 8.
+    AIDA::IProfile1D *_pt90Max630, *_pt90Min630, *_pt90Diff630;
+
+    /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
+    /// the cone track multiplicity at \f$ \sqrt{s} = 1800 \text{GeV} \f$.
+    /// Corresponds to figure 5, and HepData table 4.
+    AIDA::IProfile1D *_num90Max1800, *_num90Min1800;
+
+    /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
+    /// the \f$ p_T \f$ sum at \f$ \sqrt{s} = 1800 \text{GeV} \f$.
+    /// Corresponds to figure 7, and HepData table 7.
+    AIDA::IProfile1D *_pTSum1800_2Jet, *_pTSum1800_3Jet;
+
+    /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
+    /// the \f$ p_T \f$ sum at \f$ \sqrt{s} = 630 \text{GeV} \f$.
+    /// Corresponds to figure 9, and HepData table 9.
+    AIDA::IProfile1D *_pTSum630_2Jet, *_pTSum630_3Jet;
+
+    /// Histogram of \f$ p_{T\text{sum}} \f$ distribution for 5 different 
+    /// \f$ E_{T1} \f$ bins.
+    /// Corresponds to figure 4, and HepData table 3.
+    AIDA::IHistogram1D *_pt90Dbn1800Et40, *_pt90Dbn1800Et80, *_pt90Dbn1800Et120, 
+      *_pt90Dbn1800Et160, *_pt90Dbn1800Et200;
+
+    /// Histograms of track multiplicity and \f$ p_T \f$ distributions for 
+    /// minimum bias events.
+    /// Figure 6, and HepData tables 5 & 6.
+    /// Figure 10, and HepData tables 10 & 11.
+    AIDA::IHistogram1D *_numTracksDbn1800MB, *_ptDbn1800MB;
+    AIDA::IHistogram1D *_numTracksDbn630MB, *_ptDbn630MB;
+    //@}
+    
+  };
+    
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2004_S5839831> plugin_CDF_2004_S5839831;
+
+}

Copied: trunk/src/Analyses/CDF_2005_S6080774.cc (from r1802, trunk/src/Analyses/CDF/CDF_2005_S6080774.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2005_S6080774.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2005_S6080774.cc)
@@ -0,0 +1,114 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/IdentifiedFinalState.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+
+namespace Rivet {
+
+
+  /// @brief CDF Run II measurement of isolated diphoton production:
+  /// diphoton mass, pair pT and azimuthal separation distributions.
+  class CDF_2005_S6080774 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    CDF_2005_S6080774() : Analysis("CDF_2005_S6080774") {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      // Inclusive final state, used to compute photon isolation
+      FinalState fs;
+      addProjection(fs, "FS");
+      
+      // Photons with |eta| < 0.9 (third ctor arg presumably pT > 13 GeV --
+      // confirm against the IdentifiedFinalState interface)
+      IdentifiedFinalState ifs(-0.9, 0.9, 13.0*GeV);
+      ifs.acceptId(PHOTON);
+      addProjection(ifs, "IFS");
+    }
+
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms: four copies of each observable, one per HepData y-axis
+    void init() {
+      for (size_t yAxisId=1; yAxisId<5; ++yAxisId) {
+        _h_m_PP.push_back(bookHistogram1D(1, 1, yAxisId));
+        _h_pT_PP.push_back(bookHistogram1D(2, 1, yAxisId));
+        _h_dphi_PP.push_back(bookHistogram1D(3, 1, yAxisId));
+      }
+    }
+
+
+    /// Select events with two isolated photons and fill the distributions
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      ParticleVector photons = applyProjection<IdentifiedFinalState>(event, "IFS").particles();
+      
+      // Require at least two photons, with at least one above 14 GeV
+      if (photons.size() < 2 ||
+          (photons[0].momentum().pT() < 14.0*GeV && photons[1].momentum().pT() < 14.0*GeV)) {
+        vetoEvent;
+      }
+      
+      // Isolate photons with ET_sum in cone
+      // (sum all FS momenta within R = 0.4 of the photon; the photon's own
+      // ET is subtracted, so the cut is on the extra activity only)
+      ParticleVector isolated_photons;
+      ParticleVector fs = applyProjection<FinalState>(event, "FS").particles();
+      foreach (const Particle& photon, photons) {
+        FourMomentum mom_in_cone;
+        double eta_P = photon.momentum().eta();
+        double phi_P = photon.momentum().phi();
+        foreach (const Particle& p, fs) {
+          if (deltaR(eta_P, phi_P, p.momentum().eta(), p.momentum().phi()) < 0.4) {
+            mom_in_cone += p.momentum();
+          }
+        }
+        if (mom_in_cone.Et()-photon.momentum().Et() < 1.0*GeV) {
+          isolated_photons.push_back(photon);
+        }
+      }
+      
+      // Require exactly two isolated photons
+      if (isolated_photons.size() != 2) {
+        vetoEvent;
+      }
+      
+      // Fill the same diphoton observables into all four histogram copies
+      // (presumably different binnings per HepData y-axis -- confirm)
+      FourMomentum mom_PP = isolated_photons[0].momentum() + isolated_photons[1].momentum();
+      for (size_t i=0; i<4; ++i) {
+        _h_m_PP[i]->fill(mom_PP.mass(), weight);
+        _h_pT_PP[i]->fill(mom_PP.pT(), weight);
+        _h_dphi_PP[i]->fill(mapAngle0ToPi(isolated_photons[0].momentum().phi()-
+                                          isolated_photons[1].momentum().phi())/M_PI, weight);
+      }      
+    }
+    
+    
+    /// Scale all histograms to differential cross-sections
+    void finalize() {
+      for (size_t i=0; i<4; ++i) {
+        scale(_h_m_PP[i], crossSection()/sumOfWeights());
+        scale(_h_pT_PP[i], crossSection()/sumOfWeights());
+        scale(_h_dphi_PP[i], crossSection()/sumOfWeights());
+      }  
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    /// Diphoton mass, pair pT and azimuthal separation, four copies each
+    std::vector<AIDA::IHistogram1D*> _h_m_PP;
+    std::vector<AIDA::IHistogram1D*> _h_pT_PP;
+    std::vector<AIDA::IHistogram1D*> _h_dphi_PP;
+    //@}
+    
+    
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2005_S6080774> plugin_CDF_2005_S6080774;
+
+}

Copied: trunk/src/Analyses/CDF_2005_S6217184.cc (from r1802, trunk/src/Analyses/CDF/CDF_2005_S6217184.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2005_S6217184.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2005_S6217184.cc)
@@ -0,0 +1,144 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/JetShape.hh"
+
+namespace Rivet {
+
+  
+  /** @brief CDF Run II jet shape analysis
+   * @author Lars Sonnenschein
+   * @author Andy Buckley
+   */	
+  class CDF_2005_S6217184 : public Analysis {    
+  public:
+    
+    /// @name Constructors etc.
+    //@{
+    
+    /// Constructor
+    CDF_2005_S6217184()
+      : Analysis("CDF_2005_S6217184")
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      
+      const FinalState fs(-2.0, 2.0);
+      addProjection(fs, "FS");
+      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.7), "Jets"); 
+      addProjection(TotalVisibleMomentum(fs), "CalMET");
+      addProjection(PVertex(), "PV");
+      
+      // Veto (anti)neutrinos, and muons with pT above 1.0 GeV
+      // NOTE(review): VetoedFinalState and MAXDOUBLE are used here but their
+      // headers are not among this file's includes -- presumably pulled in
+      // transitively; confirm the includes are complete.
+      VetoedFinalState vfs(fs);
+      vfs.vetoNeutrinos();
+      vfs.addVetoDetail(MUON, 1.0*GeV, MAXDOUBLE);
+      addProjection(vfs, "VFS");
+      // The JetShape projection holds a reference to _jetaxes, which is
+      // refilled per event in analyze() before the projection is applied.
+      addProjection(JetShape(vfs, _jetaxes, 0.0, 0.7, 0.1, 0.3), "JetShape");
+      
+      // Specify pT bins (19 edges -> 18 bins, matching the histogram arrays)
+      _pTbins += 37.0, 45.0, 55.0, 63.0, 73.0, 84.0, 97.0, 112.0, 128.0, 
+        148.0, 166.0, 186.0, 208.0, 229.0, 250.0, 277.0, 304.0, 340.0, 380.0;
+    }
+    
+    //@}
+    
+    
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      // 18 = 6x3 pT bins, one histogram each
+      for (size_t i = 0; i < 6; ++i) { 
+        for (size_t j = 0; j < 3; ++j) {
+          size_t k = i*3 + j;
+          _profhistRho_pT[k] = bookProfile1D(i+1, 1, j+1);
+          _profhistPsi_pT[k] = bookProfile1D(6+i+1, 1, j+1);
+        }
+      }    
+      
+      _profhistPsi = bookProfile1D(13, 1, 1);
+    }
+    
+    
+    
+    /// Do the analysis
+    void analyze(const Event& event) {
+      
+      // Get jets and require at least one to pass pT and y cuts
+      const Jets jets = applyProjection<FastJets>(event, "Jets").jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity before cuts = " << jets.size() << endl;
+      
+      // Determine the central jet axes
+      // (37 < pT < 380 GeV, 0.1 < |y| < 0.7; the axes are shared with the
+      // JetShape projection via the _jetaxes member)
+      _jetaxes.clear();
+      foreach (const Jet& jt, jets) {
+        const FourMomentum pj = jt.momentum();
+        if (inRange(pj.pT()/GeV, 37.0, 380.0) && inRange(fabs(pj.rapidity()), 0.1, 0.7)) {
+          _jetaxes.push_back(jt.momentum());
+        }
+      }
+      if (_jetaxes.empty()) vetoEvent;
+      
+      // Calculate and histogram jet shapes
+      const double weight = event.weight();
+      const JetShape& js = applyProjection<JetShape>(event, "JetShape");
+      
+      /// @todo Use BinnedHistogram, for collections of histos each for a range of values of an extra variable
+      for (size_t jind = 0; jind < _jetaxes.size(); ++jind) {
+        for (size_t ipT = 0; ipT < 18; ++ipT) {
+          if (_jetaxes[jind].pT() > _pTbins[ipT] && _jetaxes[jind].pT() <= _pTbins[ipT+1]) {
+            for (size_t rbin = 0; rbin < js.numBins(); ++rbin) {
+              // Differential shape rho evaluated at the radial bin centre
+              const double rad_Rho = js.rMin() + (rbin+0.5)*js.interval();
+              _profhistRho_pT[ipT]->fill(rad_Rho/0.7, (0.7/1.0)*js.diffJetShape(jind, rbin), weight);
+              /// @todo Calc int histos from diff histos
+              // Integral shape Psi evaluated at the radial bin upper edge
+              const double rad_Psi = js.rMin() +(rbin+1.0)*js.interval();
+              _profhistPsi_pT[ipT]->fill(rad_Psi/0.7, js.intJetShape(jind, rbin), weight);
+            }
+            /// @todo Calc int histos from diff histos
+            _profhistPsi->fill((_pTbins[ipT] + _pTbins[ipT+1])/2.0, js.psi(jind), weight);
+          }
+        }
+      }
+      
+    }
+    
+    
+    // Finalize: profile histograms need no normalization
+    void finalize() {  
+      //
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Analysis data
+    //@{
+
+    /// Vector of jet axes
+    vector<FourMomentum> _jetaxes;
+
+    /// \f$p_\perp\f$ bins to be distinguished during analysis
+    vector<double> _pTbins;
+    //@}
+
+
+    /// @name Histograms
+    //@{
+    /// Differential (rho) and integral (Psi) jet shape profiles, one per pT bin
+    AIDA::IProfile1D* _profhistRho_pT[18];
+    AIDA::IProfile1D* _profhistPsi_pT[18];
+    /// Psi(0.3/R) vs. jet pT
+    AIDA::IProfile1D* _profhistPsi;
+    //@}
+
+  };
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2005_S6217184> plugin_CDF_2005_S6217184;
+
+}

Copied: trunk/src/Analyses/CDF_2006_S6450792.cc (from r1802, trunk/src/Analyses/CDF/CDF_2006_S6450792.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2006_S6450792.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2006_S6450792.cc)
@@ -0,0 +1,71 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+  /// @brief CDF Run II inclusive jet cross-section, midpoint cone jets
+  /// with \f$ p_T > 61 \f$ GeV in the rapidity slice \f$ 0.1 < |y| < 0.7 \f$.
+  class CDF_2006_S6450792 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    CDF_2006_S6450792() 
+      : Analysis("CDF_2006_S6450792") {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      FinalState fs;
+      // Midpoint cone jets, R = 0.7, pT > 61 GeV
+      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.7, 61.0*GeV), "ConeFinder");
+    }
+    //@}
+    
+    
+  public:
+    
+    /// @name Analysis methods
+    //@{
+    
+    /// Book the jet pT spectrum
+    void init() {
+      _h_jet_pt = bookHistogram1D(1, 1, 1);
+    }
+
+
+    /// Fill the spectrum with jets in 0.1 < |y| < 0.7
+    void analyze(const Event& event) {
+      const Jets& jets = applyProjection<JetAlg>(event, "ConeFinder").jets();
+      foreach (const Jet& jet, jets) {
+        double y = fabs(jet.momentum().rapidity());
+        if (inRange(y, 0.1, 0.7)) {
+          _h_jet_pt->fill(jet.momentum().pT()/GeV, event.weight());
+        }
+      }
+    }
+    
+    
+    /// Scale to a differential cross-section in nb per unit rapidity
+    void finalize() {
+      // Total width of the accepted slice 0.1 < |y| < 0.7: 2*(0.7-0.1) = 1.2
+      const double delta_y = 1.2;
+      scale(_h_jet_pt, crossSection()/nanobarn/sumOfWeights()/delta_y);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    /// Inclusive jet pT spectrum
+    AIDA::IHistogram1D *_h_jet_pt;
+    //@}
+
+  };
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2006_S6450792> plugin_CDF_2006_S6450792;
+
+}

Copied: trunk/src/Analyses/CDF_2006_S6653332.cc (from r1802, trunk/src/Analyses/CDF/CDF_2006_S6653332.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2006_S6653332.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2006_S6653332.cc)
@@ -0,0 +1,177 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/InvMassFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/ChargedLeptons.hh"
+
+namespace Rivet {
+
+
+  /* @brief CDF Run II analysis: jet \f$ p_T \f$ and \f$ \eta \f$ 
+   *   distributions in Z + (b) jet production
+   * @author Lars Sonnenschein
+   *
+   * This CDF analysis provides \f$ p_T \f$ and \f$ \eta \f$ distributions of
+   * jets in Z + (b) jet production, before and after tagging.
+   */
+  class CDF_2006_S6653332 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: registers the full final state, a dilepton
+    /// invariant-mass final state used as the Z candidate, and a
+    /// midpoint-cone (R = 0.7) jet projection on the remaining particles.
+    CDF_2006_S6653332()  
+      : Analysis("CDF_2006_S6653332"),
+        _Rjet(0.7), _JetPtCut(20.), _JetEtaCut(1.5),
+        _sumWeightsWithZ(0.0), _sumWeightsWithZJet(0.0)
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      const FinalState fs(-3.6, 3.6);
+      addProjection(fs, "FS");
+      
+      // Create a final state with any e+e- or mu+mu- pair with 
+      // invariant mass 76 -> 106 GeV and ET > 20 (Z decay products)
+      // NOTE(review): no explicit ET > 20 cut is applied in this code;
+      // confirm whether it is meant to be imposed here or by the projection.
+      std::vector<std::pair<long,long> > vids;
+      vids.push_back(make_pair(ELECTRON, POSITRON));
+      vids.push_back(make_pair(MUON, ANTIMUON));
+      FinalState fs2(-3.6, 3.6);
+      InvMassFinalState invfs(fs2, vids, 76*GeV, 106*GeV);
+      addProjection(invfs, "INVFS");
+      
+      // Make a final state without the Z decay products for jet clustering
+      VetoedFinalState vfs(fs);
+      vfs.addVetoOnThisFinalState(invfs);
+      addProjection(vfs, "VFS");
+      addProjection(FastJets(vfs, FastJets::CDFMIDPOINT, 0.7), "Jets");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the b-jet cross-section histogram and the two ratio histograms.
+    void init() {
+      // Book histograms
+      _sigmaBJet = bookHistogram1D(1, 1, 1);
+      _ratioBJetToZ = bookHistogram1D(2, 1, 1);
+      _ratioBJetToJet = bookHistogram1D(3, 1, 1);  
+    }
+
+  
+    /// Do the analysis
+    void analyze(const Event& event) {
+      // Check we have an l+l- pair that passes the kinematic cuts
+      // Get the Z decay products (mu+mu- or e+e- pair)
+      const InvMassFinalState& invMassFinalState = applyProjection<InvMassFinalState>(event, "INVFS");
+      const ParticleVector&  ZDecayProducts =  invMassFinalState.particles();
+      
+      // Make sure we have at least 2 Z decay products (mumu or ee) 
+      if (ZDecayProducts.size() < 2) vetoEvent;      
+      _sumWeightsWithZ += event.weight();      
+      // @todo: write out a warning if there are more than two decay products
+      // NOTE(review): Zmom is computed but never used below.
+      FourMomentum Zmom = ZDecayProducts[0].momentum() +  ZDecayProducts[1].momentum();
+      
+      // Put all b-quarks in a vector
+      /// @todo Use jet contents rather than accessing quarks directly
+      ParticleVector bquarks;
+      /// @todo Use nicer looping
+      for (GenEvent::particle_const_iterator p = event.genEvent().particles_begin(); 
+           p != event.genEvent().particles_end(); ++p) {
+        if ( fabs((*p)->pdg_id()) == BQUARK ) {
+          bquarks.push_back(Particle(**p));
+        }
+      }
+      
+      // Get jets 
+      const FastJets& jetpro = applyProjection<FastJets>(event, "Jets");
+      getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jetpro.size() << endl;
+      
+      const PseudoJets& jets = jetpro.pseudoJetsByPt();
+      getLog() << Log::DEBUG << "jetlist size = " << jets.size() << endl;
+      
+      int numBJet = 0;
+      int numJet  = 0;
+      // for each b-jet plot the ET and the eta of the jet, normalise to the total cross section at the end
+      // for each event plot N jet and pT(Z), normalise to the total cross section at the end 
+      for (PseudoJets::const_iterator jt = jets.begin(); jt != jets.end(); ++jt) {
+        // select jets that pass the kinematic cuts
+        if (jt->perp() > _JetPtCut && fabs(jt->rapidity()) <= _JetEtaCut) {
+          ++numJet;
+          // Does the jet contain a b-quark?
+          /// @todo Use jet contents rather than accessing quarks directly
+          
+          // A jet is b-tagged if any b-quark lies within deltaR <= _Rjet of its axis.
+          bool bjet = false;
+          foreach (const Particle& bquark,  bquarks) {
+            if (deltaR(jt->rapidity(), jt->phi(), bquark.momentum().rapidity(),bquark.momentum().azimuthalAngle()) <= _Rjet) {
+              bjet = true;
+              break;
+            }
+          } // end loop around b-jets
+          if (bjet) {
+            numBJet++;
+          }
+        }
+      } // end loop around jets
+      
+      // All three histograms are filled at sqrt(s) = 1960 GeV; they get
+      // their distinct normalisations in finalize().
+      if (numJet > 0)    _sumWeightsWithZJet += event.weight();
+      if (numBJet > 0) {
+        _sigmaBJet->fill(1960.0,event.weight());
+        _ratioBJetToZ->fill(1960.0,event.weight());
+        _ratioBJetToJet->fill(1960.0,event.weight());
+      }
+      
+    }
+    
+  
+    /// Finalize: convert the b-jet count to a cross-section and form the
+    /// ratios to the inclusive Z and Z+jet event counts.
+    /// NOTE(review): divides by _sumWeightsWithZ/_sumWeightsWithZJet without
+    /// a zero guard -- division by zero if no event passed the selection.
+    void finalize() { 
+      getLog() << Log::DEBUG << "Total sum of weights = " << sumOfWeights() << endl;
+      getLog() << Log::DEBUG << "Sum of weights for Z production in mass range = " << _sumWeightsWithZ << endl;
+      getLog() << Log::DEBUG << "Sum of weights for Z+jet production in mass range = " << _sumWeightsWithZJet << endl;
+      
+      _sigmaBJet->scale(crossSection()/sumOfWeights());
+      _ratioBJetToZ->scale(1.0/_sumWeightsWithZ);
+      _ratioBJetToJet->scale(1.0/_sumWeightsWithZJet);
+    }
+    
+        //@}
+
+
+  private:
+
+    /// @name Cuts and counters
+    //@{
+
+    // b-tag matching cone radius
+    double _Rjet;
+    // Jet selection cuts: pT > 20 GeV, |y| <= 1.5
+    double _JetPtCut;
+    double _JetEtaCut;
+
+    // Accumulated event weights for the ratio denominators
+    double _sumWeightsWithZ;
+    double _sumWeightsWithZJet;
+
+    //@}
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _sigmaBJet;
+    AIDA::IHistogram1D* _ratioBJetToZ;
+    AIDA::IHistogram1D* _ratioBJetToJet;
+    //@}
+    
+  };
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2006_S6653332> plugin_CDF_2006_S6653332;
+
+}

Copied: trunk/src/Analyses/CDF_2007_S7057202.cc (from r1802, trunk/src/Analyses/CDF/CDF_2007_S7057202.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2007_S7057202.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2007_S7057202.cc)
@@ -0,0 +1,176 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/BinnedHistogram.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+  
+
+  /// @brief CDF Run II inclusive jet cross-section using the kT algorithm.
+  /// Jets are reconstructed with three radius parameters (D = 0.5, 0.7, 1.0);
+  /// the D = 0.7 sample is binned in five |y| slices, the others in 0.1-0.7.
+  /// @author James Monk
+  class CDF_2007_S7057202 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: registers the three kT jet projections and requests
+    /// the generator cross-section.
+    CDF_2007_S7057202()
+      : Analysis("CDF_2007_S7057202"),
+        _minY(0.1), _maxY(0.7), _jetMinPT(54.0*GeV)
+    {
+      setBeams(PROTON, ANTIPROTON);
+      //setSqrtS(1960*GeV);
+      const FinalState fs;
+      addProjection(FastJets(fs, FastJets::KT, 0.5), "JetsD05");
+      addProjection(FastJets(fs, FastJets::KT, 0.7), "JetsD07");
+      addProjection(FastJets(fs, FastJets::KT, 1.0), "JetsD10");
+      setNeedsCrossSection(true);
+    }
+    
+    
+    //@}
+
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Book histos and set counters for number of events passed in each one
+    void init() {
+      _histoD05 = bookHistogram1D(6, 1, 1);
+      _histoD10 = bookHistogram1D(7, 1, 1);
+      _binnedHistosD07.addHistogram(  0, 0.1, bookHistogram1D(1, 1, 1));
+      _binnedHistosD07.addHistogram(0.1, 0.7, bookHistogram1D(2, 1, 1));
+      _binnedHistosD07.addHistogram(0.7, 1.1, bookHistogram1D(3, 1, 1));
+      _binnedHistosD07.addHistogram(1.1, 1.6, bookHistogram1D(4, 1, 1));
+      _binnedHistosD07.addHistogram(1.6, 2.1, bookHistogram1D(5, 1, 1));
+      
+      // Per-histogram |y| bin width (factor 2 covers both hemispheres)
+      size_t yind = 0;
+      for (vector<AIDA::IHistogram1D*>::const_iterator histIt = _binnedHistosD07.getHistograms().begin();
+           histIt != _binnedHistosD07.getHistograms().end(); ++histIt){
+        _eventsPassed[*histIt] = 0.0;
+        _yBinWidths[*histIt] = 2.0 * (_ybins[yind+1]-_ybins[yind]); 
+        ++yind;
+      }
+      // D05 and D10 use the single 0.1 < |y| < 0.7 slice (_ybins[1..2])
+      _eventsPassed[_histoD05] = 0.0;
+      _yBinWidths[_histoD05] = 2.0*(-_ybins[1]+_ybins[2]);
+      _eventsPassed[_histoD10] = 0.0;
+      _yBinWidths[_histoD10] = 2.0*(-_ybins[1]+_ybins[2]);
+    }
+    
+    
+    /// Do the analysis
+    void analyze(const Event& event) {
+      const double weight = event.weight();    
+      
+      const PseudoJets jetListD07 = applyProjection<FastJets>(event, "JetsD07").pseudoJets();
+      set< IHistogram1D*> passed;
+      /// @todo Use Jet interface rather than FastJet:PseudoJet
+      for (PseudoJets::const_iterator jet = jetListD07.begin(); jet != jetListD07.end(); ++jet) {
+        const double pt = jet->perp();
+        if (pt > _jetMinPT) {
+          AIDA::IHistogram1D* histo = _binnedHistosD07.fill(fabs(jet->rapidity()), pt, weight);
+          // NOTE(review): the fill above happens unconditionally; only the
+          // per-histogram event count is gated on the overflow check.
+          if (histo != 0) {
+            if (histo->coordToIndex(pt) != IAxis::OVERFLOW_BIN) {
+              passed.insert(histo);
+              _eventsPassed[histo] += weight;
+            }
+          }
+        }
+      }
+      
+      /// @todo Use Jet interface rather than FastJet:PseudoJet    
+      const PseudoJets jetListD05 = applyProjection<FastJets>(event, "JetsD05").pseudoJets();
+      for (PseudoJets::const_iterator jet = jetListD05.begin(); jet != jetListD05.end(); ++jet) {
+        const double pt = jet->perp();
+        if (pt > _jetMinPT) {
+          double rap = fabs(jet->rapidity());
+          if (rap >= _minY && rap < _maxY){
+            _histoD05->fill(pt, weight);
+            if (_histoD05->coordToIndex(pt) != IAxis::OVERFLOW_BIN){
+              passed.insert(_histoD05);
+              _eventsPassed[_histoD05] += weight;
+            }
+          }
+        }
+      }
+      
+      /// @todo Use Jet interface rather than FastJet:PseudoJet
+      const PseudoJets jetListD10 = applyProjection<FastJets>(event, "JetsD10").pseudoJets();
+      for (PseudoJets::const_iterator jet = jetListD10.begin(); jet != jetListD10.end(); ++jet){
+        const double pt = jet->perp();
+        if (pt > _jetMinPT) {
+          double rap = fabs(jet->rapidity());
+          if (rap >= _minY && rap < _maxY){
+            _histoD10->fill(pt, weight);
+            if (_histoD10->coordToIndex(pt) != IAxis::OVERFLOW_BIN){
+              passed.insert(_histoD10);
+              _eventsPassed[_histoD10] += weight;
+            }
+          }
+        }
+      }
+    }  
+    
+    
+    // Normalise histograms to cross-section
+    void finalize() {
+      const double xSecPerEvent = crossSection()/nanobarn / sumOfWeights();
+      getLog() << Log::INFO << "Cross-section = " << crossSection()/nanobarn << " nb" << endl;
+      
+      // NOTE(review): the two maps are iterated in lockstep; this is safe
+      // only because both are keyed on the same histogram pointers (so their
+      // sorted orders coincide), but it is fragile -- confirm intent.
+      for (map<IHistogram1D*,double>::iterator histIt = _eventsPassed.begin(),
+             histJt = _yBinWidths.begin(); histIt != _eventsPassed.end(); ++histIt, ++histJt) {
+        IHistogram1D* hist = histIt->first;
+        const double xSec = xSecPerEvent * histIt->second / histJt->second;
+        normalize(hist, xSec);
+      }
+    }
+    
+        //@}
+    
+  private:
+
+    /// Rapidity range of histograms for R=0.05 and R=1 kt jets
+    const double _minY, _maxY;
+        
+    /// Min jet \f$ p_T \f$ cut.
+    /// @todo Make static const and UPPERCASE?
+    const double _jetMinPT;
+    
+    /// Counter for the number of events analysed (actually the sum of weights, hence double).
+    /// NOTE(review): never written or read in this class -- candidate for removal.
+    double _eventsTried;
+
+    /// @name Histograms
+    //@{
+    /// The number of events in each histogram
+    map<AIDA::IHistogram1D*, double> _eventsPassed;
+
+    /// The y bin width of each histogram
+    map<AIDA::IHistogram1D*, double> _yBinWidths;
+
+    /// The y bin edge values
+    static const double _ybins[6];
+
+    /// Histograms in different eta regions
+    BinnedHistogram<double> _binnedHistosD07;
+
+    // Single histogram for the \f$R=0.5\f$ \f$k_\perp\f$ jets
+    AIDA::IHistogram1D* _histoD05;
+
+    // Single histogram for the \f$R=1.0\f$ \f$k_\perp\f$ jets
+    AIDA::IHistogram1D* _histoD10;
+    //@}
+
+  };
+
+
+  // Initialise static
+  const double CDF_2007_S7057202::_ybins[] = { 0.0, 0.1, 0.7, 1.1, 1.6, 2.1 };
+  
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2007_S7057202> plugin_CDF_2007_S7057202;
+
+}

Copied: trunk/src/Analyses/CDF_2008_LEADINGJETS.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_LEADINGJETS.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_LEADINGJETS.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_LEADINGJETS.cc)
@@ -0,0 +1,244 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /* CDF Run II underlying event in leading jet events
+   * @author Hendrik Hoeth
+   * 
+   * Rick Field's measurement of the underlying event in "leading jet" events.
+   * The leading jet (CDF midpoint R=0.7) must be within |eta|<2 and defines
+   * the "toward" phi direction. Particles are selected in |eta|<1. For the pT
+   * related observables there is a pT>0.5 GeV cut. For sum(ET) there is no pT cut.
+   * 
+   * 
+   * @par Run conditions
+   * 
+   * @arg \f$ \sqrt{s} = \f$ 1960 GeV
+   * @arg Run with generic QCD events.
+   * @arg Set particles with c*tau > 10 mm stable
+   * @arg Several \f$ p_\perp^\text{min} \f$ cutoffs are probably required to fill the profile histograms:
+   *   @arg \f$ p_\perp^\text{min} = \f$ 0 (min bias), 10, 20, 50, 100, 150 GeV
+   *   @arg The corresponding merging points are at \f$ p_T = \f$ 0, 30, 50, 80, 130, 180 GeV
+   * 
+   */ 
+  class CDF_2008_LEADINGJETS : public Analysis {
+  public:
+    
+    /// @name Constructors etc.
+    //@{
+    
+    /// Constructor: registers the wide final state used for jet finding
+    /// and the narrower (charged) final states used for the UE observables.
+    CDF_2008_LEADINGJETS()
+      : Analysis("CDF_2008_LEADINGJETS")
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      
+      // Final state for the jet finding
+      const FinalState fsj(-4.0, 4.0, 0.0*GeV);
+      addProjection(fsj, "FSJ");
+      addProjection(FastJets(fsj, FastJets::CDFMIDPOINT, 0.7), "MidpointJets");
+      
+      // Final state for the sum(ET) distributions
+      const FinalState fs(-1.0, 1.0, 0.0*GeV);
+      addProjection(fs, "FS");
+      
+      // Charged final state for the distributions
+      const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
+      addProjection(cfs, "CFS");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms
+    void init() {
+      _hist_pnchg      = bookProfile1D( 1, 1, 1);
+      _hist_pmaxnchg   = bookProfile1D( 2, 1, 1);
+      _hist_pminnchg   = bookProfile1D( 3, 1, 1);
+      _hist_pdifnchg   = bookProfile1D( 4, 1, 1);
+      _hist_pcptsum    = bookProfile1D( 5, 1, 1);
+      _hist_pmaxcptsum = bookProfile1D( 6, 1, 1);
+      _hist_pmincptsum = bookProfile1D( 7, 1, 1);
+      _hist_pdifcptsum = bookProfile1D( 8, 1, 1);
+      _hist_pcptave    = bookProfile1D( 9, 1, 1);
+      //_hist_onchg   = bookProfile1D( 1, 1, 1, "Overall number of charged particles");
+      //_hist_ocptsum = bookProfile1D( 2, 1, 1, "Overall charged $p_\\perp$ sum");
+      //_hist_oetsum  = bookProfile1D( 3, 1, 1, "Overall $E_\\perp$ sum");
+    }
+    
+    
+    // Do the analysis: find the leading jet, then bin charged particles
+    // into toward/transverse/away azimuthal regions relative to it.
+    void analyze(const Event& e) {
+      const FinalState& fsj = applyProjection<FinalState>(e, "FSJ");
+      if (fsj.particles().size() < 1) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+      
+      const Jets& jets = applyProjection<FastJets>(e, "MidpointJets").jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
+      
+      // We require the leading jet to be within |eta|<2
+      if (jets.size() < 1 || fabs(jets[0].momentum().eta()) >= 2) {
+        getLog() << Log::DEBUG << "Failed leading jet cut" << endl;
+        vetoEvent;
+      }
+      
+      const double jetphi = jets[0].momentum().phi();
+      const double jeteta = jets[0].momentum().eta();
+      const double jetpT  = jets[0].momentum().perp();
+      getLog() << Log::DEBUG << "Leading jet: pT = " << jetpT
+               << ", eta = " << jeteta << ", phi = " << jetphi << endl;
+      
+      // Get the event weight
+      const double weight = e.weight();
+      
+      // Get the final states to work with for filling the distributions
+      const FinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
+      
+      size_t numOverall(0),     numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0)  ;
+      double ptSumOverall(0.0), ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
+      //double EtSumOverall(0.0), EtSumToward(0.0), EtSumTrans1(0.0), EtSumTrans2(0.0), EtSumAway(0.0);
+      double ptMaxOverall(0.0), ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
+      
+      // Calculate all the charged stuff
+      foreach (const Particle& p, cfs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double pT = p.momentum().pT();
+        const double phi = p.momentum().phi();
+        
+        /// @todo The jet and particle phis should now be the same: check
+        // rotatedphi in [0, 2*PI) distinguishes the two transverse wedges
+        double rotatedphi = phi - jetphi;
+        while (rotatedphi < 0) rotatedphi += 2*PI;
+        
+        ptSumOverall += pT;
+        ++numOverall;
+        if (pT > ptMaxOverall) {
+          ptMaxOverall = pT;
+        }
+        
+        // Toward region: |dphi| < 60 degrees
+        if (dPhi < PI/3.0) {
+          ptSumToward += pT;
+          ++numToward;
+          if (pT > ptMaxToward)
+            ptMaxToward = pT;
+        }
+        // Transverse regions: 60 < |dphi| < 120 degrees, split by hemisphere
+        else if (dPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            ptSumTrans1 += pT;
+            ++numTrans1;
+            if (pT > ptMaxTrans1)
+              ptMaxTrans1 = pT;
+          }
+          else {
+            ptSumTrans2 += pT;
+            ++numTrans2;
+            if (pT > ptMaxTrans2)
+              ptMaxTrans2 = pT;
+          }
+        }
+        // Away region: |dphi| > 120 degrees
+        else {
+          ptSumAway += pT;
+          ++numAway;
+          if (pT > ptMaxAway)
+            ptMaxAway = pT;
+        }
+      } // end charged particle loop
+      
+      
+      #if 0   
+      /// @todo Enable this part when we have the numbers from Rick Field
+      // NOTE(review): this disabled block will not compile as-is if enabled:
+      // it compares the *function* `deltaPhi` (should be the local `dPhi`)
+      // and the EtSum* accumulators above are commented out.
+      
+      // And now the same business for all particles (including neutrals)
+      foreach (const Particle& p, fs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double ET = p.momentum().Et();
+        const double phi = p.momentum().azimuthalAngle();
+        /// @todo Check that phi mappings really match (they should now)
+        double rotatedphi = phi - jetphi;
+        while (rotatedphi < 0) rotatedphi += 2*PI;
+        
+        EtSumOverall += ET;
+        
+        if (deltaPhi < PI/3.0) {
+          EtSumToward += ET;
+        }
+        else if (deltaPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            EtSumTrans1 += ET;
+          }
+          else {
+            EtSumTrans2 += ET;
+          }
+        }
+        else {
+          EtSumAway += ET;
+        }
+      } // end all particle loop
+      #endif
+      
+      
+      // Fill the histograms (densities: region areas are 4*PI/3 and 2*PI/3)
+      //_hist_tnchg->fill(jetpT, numToward/(4*PI/3), weight);
+      _hist_pnchg->fill(jetpT, (numTrans1+numTrans2)/(4*PI/3), weight);
+      _hist_pmaxnchg->fill(jetpT, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pminnchg->fill(jetpT, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pdifnchg->fill(jetpT, abs(numTrans1-numTrans2)/(2*PI/3), weight);
+      //_hist_anchg->fill(jetpT, numAway/(4*PI/3), weight);
+      
+      //_hist_tcptsum->fill(jetpT, ptSumToward/(4*PI/3), weight);
+      _hist_pcptsum->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
+      _hist_pmaxcptsum->fill(jetpT, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pmincptsum->fill(jetpT, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pdifcptsum->fill(jetpT, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
+      //_hist_acptsum->fill(jetpT, ptSumAway/(4*PI/3), weight);
+      
+      //if (numToward > 0) {
+      //  _hist_tcptave->fill(jetpT, ptSumToward/numToward, weight);
+      //  _hist_tcptmax->fill(jetpT, ptMaxToward, weight);
+      //}
+      if ((numTrans1+numTrans2) > 0) {
+        _hist_pcptave->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
+        //_hist_pcptmax->fill(jetpT, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
+      }
+      //if (numAway > 0) {
+      //  _hist_acptave->fill(jetpT, ptSumAway/numAway, weight);
+      //  _hist_acptmax->fill(jetpT, ptMaxAway, weight);
+      //}
+    }
+    
+    
+    // Nothing to do: profile histograms need no normalisation.
+    void finalize() {  
+      //
+    }
+    
+    //@}
+
+
+  private:
+
+    // Underlying-event profiles vs leading-jet pT (transverse region)
+    AIDA::IProfile1D *_hist_pnchg;
+    AIDA::IProfile1D *_hist_pmaxnchg;
+    AIDA::IProfile1D *_hist_pminnchg;
+    AIDA::IProfile1D *_hist_pdifnchg;
+    AIDA::IProfile1D *_hist_pcptsum;
+    AIDA::IProfile1D *_hist_pmaxcptsum;
+    AIDA::IProfile1D *_hist_pmincptsum;
+    AIDA::IProfile1D *_hist_pdifcptsum;
+    AIDA::IProfile1D *_hist_pcptave;
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_LEADINGJETS> plugin_CDF_2008_LEADINGJETS;
+
+}

Copied: trunk/src/Analyses/CDF_2008_NOTE_9351.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_NOTE_9351.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_NOTE_9351.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_NOTE_9351.cc)
@@ -0,0 +1,236 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/ChargedLeptons.hh"
+
+namespace Rivet {
+
+
+  /* @brief CDF Run II underlying event in Drell-Yan
+   * @author Hendrik Hoeth
+   * 
+   * Measurement of the underlying event in Drell-Yan Z/gamma->e+e-
+   * and Z/gamma->mu+mu- events. The reconstructed Z defines the
+   * phi orientation. A Z mass window cut is applied.
+   * 
+   * 
+   * @par Run conditions
+   * 
+   * @arg \f$ \sqrt{s} = \f$ 1960 GeV
+   * @arg produce Drell-Yan events
+   * @arg Set particles with c*tau > 10 mm stable
+   * @arg Z decay mode: Z -> e+e- and Z -> mu+mu-
+   * @arg gamma decay mode: gamma -> e+e- and gamma -> mu+mu-
+   * @arg minimum invariant mass of the fermion pair coming from the Z/gamma: 70 GeV
+   * 
+   */ 
+  class CDF_2008_NOTE_9351 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: cuts on final state are \f$ -1 < \eta < 1 \f$ 
+    /// and \f$ p_T > 0.5 \f$ GeV.
+    CDF_2008_NOTE_9351()
+      : Analysis("CDF_2008_NOTE_9351")
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
+      // Separate projection for the hard leptons (pT > 20 GeV)
+      const ChargedFinalState clfs(-1.0, 1.0, 20*GeV);
+      addProjection(cfs, "FS");
+      addProjection(ChargedLeptons(clfs), "CL");
+    }
+    
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    // Book histograms
+    void init() {
+      _hist_tnchg      = bookProfile1D( 1, 1, 1);
+      _hist_pnchg      = bookProfile1D( 2, 1, 1);
+      _hist_pmaxnchg   = bookProfile1D( 3, 1, 1);
+      _hist_pminnchg   = bookProfile1D( 4, 1, 1);
+      _hist_pdifnchg   = bookProfile1D( 5, 1, 1);
+      _hist_anchg      = bookProfile1D( 6, 1, 1);
+      
+      _hist_tcptsum    = bookProfile1D( 7, 1, 1);
+      _hist_pcptsum    = bookProfile1D( 8, 1, 1);
+      _hist_pmaxcptsum = bookProfile1D( 9, 1, 1);
+      _hist_pmincptsum = bookProfile1D(10, 1, 1);
+      _hist_pdifcptsum = bookProfile1D(11, 1, 1);
+      _hist_acptsum    = bookProfile1D(12, 1, 1);
+      
+      _hist_tcptave    = bookProfile1D(13, 1, 1);
+      _hist_pcptave    = bookProfile1D(14, 1, 1);
+      _hist_acptave    = bookProfile1D(15, 1, 1);
+      
+      _hist_tcptmax    = bookProfile1D(16, 1, 1);
+      _hist_pcptmax    = bookProfile1D(17, 1, 1);
+      _hist_acptmax    = bookProfile1D(18, 1, 1);
+      
+      _hist_zptvsnchg  = bookProfile1D(19, 1, 1);
+      _hist_cptavevsnchg = bookProfile1D(20, 1, 1);
+      _hist_cptavevsnchgsmallzpt = bookProfile1D(21, 1, 1);
+    }
+    
+    
+    /// Do the analysis
+    void analyze(const Event& e) {
+      
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      // NOTE(review): the condition below only vetoes empty events
+      // (numParticles < 1), not < 2 as the comment says -- confirm intent.
+      if (numParticles < 1) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+      
+      // Get the event weight
+      const double weight = e.weight();
+      
+      // Get the leptons
+      const ParticleVector& leptons = applyProjection<ChargedLeptons>(e, "CL").chargedLeptons();
+      
+      // We want exactly two leptons of the same flavour.
+      getLog() << Log::DEBUG << "lepton multiplicity = " << leptons.size() << endl;
+      if (leptons.size() != 2 || leptons[0].pdgId() != -leptons[1].pdgId() )
+        vetoEvent;
+      
+      // Lepton pT > 20 GeV
+      if (leptons[0].momentum().pT() <= 20 || leptons[1].momentum().pT() <= 20)
+        vetoEvent;
+      
+      FourVector dilepton = leptons[0].momentum() + leptons[1].momentum();
+      
+      // Lepton pair should have an invariant mass between 70 and 110 and |eta|<6
+      if (mass(dilepton) < 70 || mass(dilepton) > 110 || fabs(pseudorapidity(dilepton)) >= 6)
+        vetoEvent;
+      getLog() << Log::DEBUG << "dilepton mass = " << mass(dilepton) << endl;
+      getLog() << Log::DEBUG << "dilepton pT   = " << pT(dilepton) << endl;
+      
+      
+      // Calculate the observables: bin particles into toward/transverse/away
+      // azimuthal regions relative to the reconstructed Z direction.
+      size_t   numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0);
+      double ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
+      double ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
+      const double phiZ = azimuthalAngle(dilepton);
+      const double pTZ  = pT(dilepton);
+      /// @todo Replace with foreach
+      for (ParticleVector::const_iterator p = fs.particles().begin(); p != fs.particles().end(); ++p) {
+        // Don't use the leptons
+        /// @todo Replace with PID::isLepton
+        // NOTE(review): this skips every particle with |PDG id| < 20, which
+        // covers all charged leptons but also quark ids -- verify acceptable.
+        if (abs(p->pdgId()) < 20) continue;
+        
+        const double dPhi = deltaPhi(p->momentum().phi(), phiZ);
+        const double pT = p->momentum().pT();
+        // rotatedphi in [0, 2*PI) distinguishes the two transverse wedges
+        double rotatedphi = p->momentum().phi() - phiZ;
+        while (rotatedphi < 0) rotatedphi += 2*PI;
+        
+        if (dPhi < PI/3.0) {
+          ptSumToward += pT;
+          ++numToward;
+          if (pT > ptMaxToward)
+            ptMaxToward = pT;
+        } else if (dPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            ptSumTrans1 += pT;
+            ++numTrans1;
+            if (pT > ptMaxTrans1)
+              ptMaxTrans1 = pT;
+          }
+          else {
+            ptSumTrans2 += pT;
+            ++numTrans2;
+            if (pT > ptMaxTrans2)
+              ptMaxTrans2 = pT;
+          }
+        } else {
+          ptSumAway += pT;
+          ++numAway;
+          if (pT > ptMaxAway)
+            ptMaxAway = pT;
+        }
+        // We need to subtract the two leptons from the number of particles to get the correct multiplicity
+        // NOTE(review): this fill is inside the particle loop, i.e. executed
+        // once per accepted particle -- confirm that is intended.
+        _hist_cptavevsnchg->fill(numParticles-2, pT, weight);
+        if (pTZ < 10)
+          _hist_cptavevsnchgsmallzpt->fill(numParticles-2, pT, weight);
+      }
+      
+      // Fill the histograms (densities: region areas are 4*PI/3 and 2*PI/3)
+      _hist_tnchg->fill(pTZ, numToward/(4*PI/3), weight);
+      _hist_pnchg->fill(pTZ, (numTrans1+numTrans2)/(4*PI/3), weight);
+      _hist_pmaxnchg->fill(pTZ, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pminnchg->fill(pTZ, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pdifnchg->fill(pTZ, abs(numTrans1-numTrans2)/(2*PI/3), weight);
+      _hist_anchg->fill(pTZ, numAway/(4*PI/3), weight);
+      
+      _hist_tcptsum->fill(pTZ, ptSumToward/(4*PI/3), weight);
+      _hist_pcptsum->fill(pTZ, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
+      _hist_pmaxcptsum->fill(pTZ, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pmincptsum->fill(pTZ, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pdifcptsum->fill(pTZ, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
+      _hist_acptsum->fill(pTZ, ptSumAway/(4*PI/3), weight);
+      
+      if (numToward > 0) {
+        _hist_tcptave->fill(pTZ, ptSumToward/numToward, weight);
+        _hist_tcptmax->fill(pTZ, ptMaxToward, weight);
+      }
+      if ((numTrans1+numTrans2) > 0) {
+        _hist_pcptave->fill(pTZ, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
+        _hist_pcptmax->fill(pTZ, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
+      }
+      if (numAway > 0) {
+        _hist_acptave->fill(pTZ, ptSumAway/numAway, weight);
+        _hist_acptmax->fill(pTZ, ptMaxAway, weight);
+      }
+      
+      // We need to subtract the two leptons from the number of particles to get the correct multiplicity
+      _hist_zptvsnchg->fill(numParticles-2, pTZ, weight);
+    }
+    
+    
+    // Nothing to do: profile histograms need no normalisation.
+    void finalize() { 
+      //
+    }
+    
+    //@}
+
+  private:
+
+    // Underlying-event profiles vs Z pT (toward/transverse/away regions)
+    AIDA::IProfile1D *_hist_tnchg;
+    AIDA::IProfile1D *_hist_pnchg;
+    AIDA::IProfile1D *_hist_pmaxnchg;
+    AIDA::IProfile1D *_hist_pminnchg;
+    AIDA::IProfile1D *_hist_pdifnchg;
+    AIDA::IProfile1D *_hist_anchg;
+    AIDA::IProfile1D *_hist_tcptsum;
+    AIDA::IProfile1D *_hist_pcptsum;
+    AIDA::IProfile1D *_hist_pmaxcptsum;
+    AIDA::IProfile1D *_hist_pmincptsum;
+    AIDA::IProfile1D *_hist_pdifcptsum;
+    AIDA::IProfile1D *_hist_acptsum;
+    AIDA::IProfile1D *_hist_tcptave;
+    AIDA::IProfile1D *_hist_pcptave;
+    AIDA::IProfile1D *_hist_acptave;
+    AIDA::IProfile1D *_hist_tcptmax;
+    AIDA::IProfile1D *_hist_pcptmax;
+    AIDA::IProfile1D *_hist_acptmax;
+    // Multiplicity-binned profiles
+    AIDA::IProfile1D *_hist_zptvsnchg;
+    AIDA::IProfile1D *_hist_cptavevsnchg;
+    AIDA::IProfile1D *_hist_cptavevsnchgsmallzpt;
+
+  };
+
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_NOTE_9351> plugin_CDF_2008_NOTE_9351;
+
+}

Copied: trunk/src/Analyses/CDF_2008_S7540469.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_S7540469.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_S7540469.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_S7540469.cc)
@@ -0,0 +1,198 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/IdentifiedFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement differential Z/gamma* + jet + X cross sections
+  /// @author Frank Siegert
+  ///
+  /// Selects events with exactly one e+e- Z candidate, clusters the rest of
+  /// the final state into CDF midpoint cone jets (R = 0.7), and fills the
+  /// inclusive jet multiplicity plus the jet pT spectra for events with at
+  /// least one and at least two jets.
+  class CDF_2008_S7540469 : public Analysis {
+
+  public:
+
+    /// Default constructor.
+    /// Registers the projections: the full final state (|eta| < 5) used as
+    /// jet input, and an electron-only final state with a 25 GeV threshold
+    /// from which the Z candidates are built.
+    CDF_2008_S7540469()
+      : Analysis("CDF_2008_S7540469")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      // Full final state
+      FinalState fs(-5.0, 5.0);
+      addProjection(fs, "FS");
+      
+      // Leading electrons in tracking acceptance
+      IdentifiedFinalState elfs(-5.0, 5.0, 25.0*GeV);
+      elfs.acceptIdPair(ELECTRON);
+      addProjection(elfs, "LeadingElectrons");
+    } 
+    
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book histograms
+    void init() {
+      _h_jet_multiplicity = bookHistogram1D(1, 1, 1);
+      _h_jet_pT_cross_section_incl_1jet = bookHistogram1D(2, 1, 1);
+      _h_jet_pT_cross_section_incl_2jet = bookHistogram1D(3, 1, 1);
+    }
+    
+    
+    /// Do the analysis 
+    void analyze(const Event & event) {
+      const double weight = event.weight();
+      
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.empty()) {
+        getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                 << " because no final state pair found " << endl;
+        vetoEvent;
+      }
+      
+      // Find the Z candidates
+      // Loop over all electron pairs (no charge requirement is applied):
+      // a candidate needs 66 < m(ee) < 116 GeV, the more central electron
+      // within |eta| < 1.0, and the other one within |eta| < 1.0 or in
+      // 1.2 < |eta| < 2.8.
+      const FinalState & electronfs = applyProjection<FinalState>(event, "LeadingElectrons");
+      std::vector<std::pair<Particle, Particle> > Z_candidates;
+      ParticleVector all_els=electronfs.particles();
+      for (size_t i=0; i<all_els.size(); ++i) {
+        for (size_t j=i+1; j<all_els.size(); ++j) {
+          bool candidate=true;
+          double mZ=FourMomentum(all_els[i].momentum()+all_els[j].momentum()).mass()/GeV;
+          if (mZ<66.0 || mZ>116.0) {
+            candidate=false;
+          }
+          double abs_eta_0=fabs(all_els[i].momentum().pseudorapidity());
+          double abs_eta_1=fabs(all_els[j].momentum().pseudorapidity());
+          // Order the two so that abs_eta_0 <= abs_eta_1
+          if (abs_eta_1<abs_eta_0) {
+            double tmp=abs_eta_0;
+            abs_eta_0=abs_eta_1;
+            abs_eta_1=tmp;
+          }
+          if (abs_eta_0>1.0) {
+            candidate=false;
+          }
+          if (!(abs_eta_1<1.0 || (abs_eta_1>1.2 && abs_eta_1<2.8))) {
+            candidate=false;
+          }
+          if (candidate) {
+            Z_candidates.push_back(make_pair(all_els[i], all_els[j]));
+          }
+        }
+      }
+      // Require exactly one Z candidate in the event
+      if (Z_candidates.size() != 1) {
+        getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                 << " because no unique electron pair found " << endl;
+        vetoEvent;
+      }
+      
+      // Now build the jets on a FS without the electrons from the Z
+      // (including their QED radiation)
+      // Photons within dR < 0.2 of either Z electron are treated as FSR and
+      // excluded; the electrons themselves are removed by barcode match.
+      ParticleVector jetparts;
+      foreach (const Particle& p, fs.particles()) {
+        bool copy = true;
+        if (p.pdgId() == PHOTON) {
+          FourMomentum p_e0 = Z_candidates[0].first.momentum();
+          FourMomentum p_e1 = Z_candidates[0].second.momentum();
+          FourMomentum p_P = p.momentum();
+          if (deltaR(p_e0.pseudorapidity(), p_e0.azimuthalAngle(),
+                     p_P.pseudorapidity(), p_P.azimuthalAngle()) < 0.2) {
+            copy = false;
+          }
+          if (deltaR(p_e1.pseudorapidity(), p_e1.azimuthalAngle(),
+                     p_P.pseudorapidity(), p_P.azimuthalAngle()) < 0.2) {
+            copy = false;
+          }
+        } else {
+          if (p.genParticle().barcode()==Z_candidates[0].first.genParticle().barcode()) {
+            copy = false;
+          }
+          if (p.genParticle().barcode()==Z_candidates[0].second.genParticle().barcode()) {
+            copy = false;
+          }
+        }
+        if (copy) jetparts.push_back(p);
+      }
+      /// @todo Allow proj creation w/o FS as ctor arg, so that calc can be used more easily.
+      FastJets jetpro(fs, FastJets::CDFMIDPOINT, 0.7);
+      jetpro.calc(jetparts);
+      
+      // Take jets with pt > 30, |eta| < 2.1:
+      /// @todo Make this neater, using the JetAlg interface and the built-in sorting
+      const Jets& jets = jetpro.jets();
+      Jets jets_cut;
+      foreach (const Jet& j, jets) {
+        if (j.momentum().pT()/GeV > 30.0 && fabs(j.momentum().pseudorapidity()) < 2.1) {
+          jets_cut.push_back(j);
+        }
+      }
+      getLog() << Log::DEBUG << "Num jets above 30 GeV = " << jets_cut.size() << endl;
+      
+      // Return if there are no jets:
+      if (jets_cut.empty()) {
+        getLog() << Log::DEBUG << "No jets pass cuts " << endl;
+        vetoEvent;
+      }
+      
+      // Sort by pT:
+      sort(jets_cut.begin(), jets_cut.end(), cmpJetsByPt);
+      
+      // cut on Delta R between jet and electrons
+      // (any selected jet overlapping either Z electron within dR < 0.7
+      // vetoes the whole event)
+      foreach (const Jet& j, jets_cut) {
+        Particle el = Z_candidates[0].first;
+        if (deltaR(el.momentum().pseudorapidity(), el.momentum().azimuthalAngle(),
+                   j.momentum().pseudorapidity(), j.momentum().azimuthalAngle()) < 0.7) {
+          vetoEvent;
+        }
+        el = Z_candidates[0].second;
+        if (deltaR(el.momentum().pseudorapidity(), el.momentum().azimuthalAngle(),
+                   j.momentum().pseudorapidity(), j.momentum().azimuthalAngle()) < 0.7) {
+          vetoEvent;
+        }
+      }
+      
+      // Inclusive jet multiplicity: one entry in every bin up to Njet
+      for (size_t njet=1; njet<=jets_cut.size(); ++njet) {
+        _h_jet_multiplicity->fill(njet, weight);
+      }
+      // NOTE(review): the size()>0 test is always true inside this loop
+      // (we vetoed empty jets_cut above); only size()>1 gates anything.
+      foreach (const Jet& j, jets_cut) {
+        if (jets_cut.size()>0) {
+          _h_jet_pT_cross_section_incl_1jet->fill(j.momentum().pT(), weight);
+        }
+        if (jets_cut.size()>1) {
+          _h_jet_pT_cross_section_incl_2jet->fill(j.momentum().pT(), weight);
+        }
+      }
+    }
+    
+    
+    /// Rescale histos
+    /// Scales by sigma(fb) / sum-of-weights, i.e. an effective inverse
+    /// luminosity factor.
+    void finalize() {
+      const double invlumi = crossSection()/femtobarn/sumOfWeights();
+      scale(_h_jet_multiplicity, invlumi);
+      scale(_h_jet_pT_cross_section_incl_1jet, invlumi);
+      scale(_h_jet_pT_cross_section_incl_2jet, invlumi);
+    }
+
+    //@}
+
+  private:
+    
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_jet_multiplicity;
+    AIDA::IHistogram1D * _h_jet_pT_cross_section_incl_1jet;
+    AIDA::IHistogram1D * _h_jet_pT_cross_section_incl_2jet;
+    //@}
+    
+  };
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_S7540469> plugin_CDF_2008_S7540469;
+
+}

Copied: trunk/src/Analyses/CDF_2008_S7541902.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_S7541902.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_S7541902.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_S7541902.cc)
@@ -0,0 +1,218 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/InvMassFinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/ChargedLeptons.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/ChargedLeptons.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/SVertex.hh"
+#include <algorithm>
+
+namespace Rivet {
+
+
+  /// @brief CDF jet pT and multiplicity distributions in W + jets events
+  ///
+  /// This CDF analysis provides jet pT distributions for 4 jet multiplicity bins
+  /// as well as the jet multiplicity distribution in W + jets events.
+  /// e-Print: arXiv:0711.4044 [hep-ex]
+  class CDF_2008_S7541902 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    /// Sets all kinematic cut values and registers the projections: an
+    /// InvMassFinalState tags the W via an e-nu pair with 65 < m < 95 GeV
+    /// and a 20 GeV threshold; jets are clustered with the CDF JetClu cone
+    /// (R = 0.4) on the final state minus the W decay products.
+    /// _xpoint is the dummy x-coordinate used by the single-bin
+    /// multiplicity histograms.
+    CDF_2008_S7541902()
+      : Analysis("CDF_2008_S7541902"),    
+        _electronETCut(20.0*GeV), _electronETACut(1.1),
+        _eTmissCut(30.0*GeV), _mTCut(20.0*GeV),
+        _jetEtCutA(20.0*GeV),  _jetEtCutB(25.0*GeV), _jetETA(2.0),
+        _xpoint(1960.)
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      // Basic FS
+      FinalState fs(-3.6, 3.6);
+      addProjection(fs, "FS");
+      
+      // Create a final state with any e-nu pair with invariant mass 65 -> 95 GeV and ET > 20 (W decay products)
+      vector<pair<long,long> > vids;
+      vids += make_pair(ELECTRON, NU_EBAR);
+      vids += make_pair(POSITRON, NU_E);
+      FinalState fs2(-3.6, 3.6, 20*GeV);
+      InvMassFinalState invfs(fs2, vids, 65*GeV, 95*GeV);
+      addProjection(invfs, "INVFS");
+      
+      // Make a final state without the W decay products for jet clustering
+      VetoedFinalState vfs(fs);
+      vfs.addVetoOnThisFinalState(invfs);
+      addProjection(vfs, "VFS");
+      addProjection(FastJets(vfs, FastJets::CDFJETCLU, 0.4), "Jets");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book the four jet-Et histograms, the four multiplicity-ratio data
+    /// point sets, the four multiplicity histograms, and the single-bin
+    /// normalisation histogram located at _xpoint.
+    void init() {
+      for (int i = 0 ; i < 4 ; ++i) {
+        _histJetEt[i] = bookHistogram1D(i+1, 1, 1);
+        _histJetMultRatio[i] = bookDataPointSet(5 , 1, i+1);
+        _histJetMult[i]   = bookHistogram1D(i+6, 1, 1);
+      } 
+      _histJetMultNorm = bookHistogram1D("norm", 1, _xpoint, _xpoint+1.);
+    }
+    
+
+    /// Do the analysis
+    void analyze(const Event& event) {
+      // Get the W decay products (electron and neutrino)
+      const InvMassFinalState& invMassFinalState = applyProjection<InvMassFinalState>(event, "INVFS");
+      const ParticleVector&  wDecayProducts = invMassFinalState.particles();
+      
+      // Pick out the electron (ET > 20, |eta| < 1.1) and the neutrino
+      // (ET > 30) among the W decay products
+      FourMomentum electronP, neutrinoP;
+      bool gotElectron(false), gotNeutrino(false);
+      foreach (const Particle& p, wDecayProducts) {
+        FourMomentum p4 = p.momentum();
+        if (p4.Et() > _electronETCut && fabs(p4.eta()) < _electronETACut && abs(p.pdgId()) == ELECTRON) {
+          electronP = p4;
+          gotElectron = true;
+        }
+        else if (p4.Et() > _eTmissCut && abs(p.pdgId()) == NU_E) {
+          neutrinoP = p4;
+          gotNeutrino = true;
+        }
+      }
+      
+      // Veto event if the electron or MET cuts fail
+      if (!gotElectron || !gotNeutrino) vetoEvent;
+      
+      // Veto event if the MTR cut fails
+      // (mT2 is the squared transverse mass of the e-nu system)
+      double mT2 = 2.0 * ( electronP.pT()*neutrinoP.pT() - electronP.px()*neutrinoP.px() - electronP.py()*neutrinoP.py() );
+      if (sqrt(mT2) < _mTCut ) vetoEvent;
+      
+      // Get the jets
+      // NOTE(review): the "_jetETA" acceptance cut below uses rapidity(),
+      // not pseudorapidity -- confirm this matches the paper's definition.
+      const JetAlg& jetProj = applyProjection<FastJets>(event, "Jets");
+      Jets theJets = jetProj.jetsByEt(_jetEtCutA);
+      size_t njetsA(0), njetsB(0);
+      foreach (const Jet& j, theJets) {
+        const FourMomentum pj = j.momentum();
+        if (fabs(pj.rapidity()) < _jetETA) {
+          // Fill differential histograms for top 4 jets with Et > 20
+          if (njetsA < 4 && pj.Et() > _jetEtCutA) {
+            ++njetsA;
+            _histJetEt[njetsA-1]->fill(pj.Et(), event.weight());
+          }
+          // Count number of jets with Et > 25 (for multiplicity histograms)
+          if (pj.Et() > _jetEtCutB) ++njetsB;
+        }
+      }
+      
+      // Jet multiplicity
+      // Inclusive counting: an event with n jets enters all histograms
+      // up to n, capped at the 4-jet histogram.
+      _histJetMultNorm->fill(_xpoint, event.weight());
+      for (size_t i = 1; i <= njetsB; ++i) {
+        _histJetMult[i-1]->fill(_xpoint, event.weight());
+        if (i == 4) break;
+      }
+    }
+    
+    
+
+    /// Finalize
+    /// Builds the jet multiplicity ratios sigma(>=n)/sigma(>=n-1) as data
+    /// point sets, propagating the fractional errors in quadrature, then
+    /// scales all histograms to the measured cross section.
+    void finalize() { 
+      const double xsec = crossSection()/sumOfWeights();
+      // Get the x-axis for the ratio plots
+      /// @todo Replace with autobooking etc. once YODA in place    
+      std::vector<double> xval; xval.push_back(_xpoint);
+      std::vector<double> xerr; xerr.push_back(.5);
+      // Fill the first ratio histogram using the special normalisation histogram for the total cross section
+      double ratio1to0 = 0.;
+      if (_histJetMultNorm->binHeight(0) > 0.) ratio1to0 = _histJetMult[0]->binHeight(0)/_histJetMultNorm->binHeight(0);
+      // Get the fractional error on the ratio histogram
+      double frac_err1to0 = 0.;
+      if (_histJetMult[0]->binHeight(0) > 0.)  frac_err1to0 = _histJetMult[0]->binError(0)/_histJetMult[0]->binHeight(0);
+      if (_histJetMultNorm->binHeight(0) > 0.) {
+        frac_err1to0 *= frac_err1to0;
+        frac_err1to0 += pow(_histJetMultNorm->binError(0)/_histJetMultNorm->binHeight(0),2.);
+        frac_err1to0 = sqrt(frac_err1to0);
+      }
+      
+      /// @todo Replace with autobooking etc. once YODA in place    
+      vector<double> yval[4]; yval[0].push_back(ratio1to0);
+      vector<double> yerr[4]; yerr[0].push_back(ratio1to0*frac_err1to0);
+      _histJetMultRatio[0]->setCoordinate(0,xval,xerr);
+      _histJetMultRatio[0]->setCoordinate(1,yval[0],yerr[0]);
+      for (int i = 0; i < 4; ++i) {
+        if (i < 3) {
+          // Ratio of the (i+1)-jet bin to the i-jet bin
+          float ratio = 0.0;
+          if (_histJetMult[i]->binHeight(0) > 0.0) ratio = _histJetMult[i+1]->binHeight(0)/_histJetMult[i]->binHeight(0);
+          float frac_err = 0.0;
+          if (_histJetMult[i]->binHeight(0) > 0.0) frac_err = _histJetMult[i]->binError(0)/_histJetMult[i]->binHeight(0);
+          if (_histJetMult[i+1]->binHeight(0) > 0.0) {
+            frac_err *= frac_err;
+            frac_err += pow(_histJetMult[i+1]->binError(0)/_histJetMult[i+1]->binHeight(0),2.);
+            frac_err = sqrt(frac_err);
+          }
+          yval[i+1].push_back(ratio);
+          yerr[i+1].push_back(ratio*frac_err);
+          _histJetMultRatio[i+1]->setCoordinate(0,xval,xerr);
+          _histJetMultRatio[i+1]->setCoordinate(1,yval[i+1],yerr[i+1]);
+        }
+        _histJetEt[i]->scale(xsec);
+        _histJetMult[i]->scale(xsec);
+      }
+      _histJetMultNorm->scale(xsec);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Cuts 
+    //@{
+    /// Cut on the electron ET:
+    double _electronETCut;
+    /// Cut on the electron ETA:
+    double _electronETACut;   
+    /// Cut on the missing ET
+    double _eTmissCut;
+    /// Cut on the transverse mass squared
+    double _mTCut;
+    /// Cut on the jet ET for differential cross sections
+    double _jetEtCutA;
+    /// Cut on the jet ET for jet multiplicity
+    double _jetEtCutB;
+    /// Cut on the jet ETA
+    double _jetETA;
+    //@}    
+
+    /// Dummy x-position used by the single-bin multiplicity histograms
+    double _xpoint;
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _histJetEt[4];
+    AIDA::IHistogram1D* _histJetMultNorm;
+    AIDA::IDataPointSet* _histJetMultRatio[4];
+    AIDA::IHistogram1D* _histJetMult[4];
+    //@}
+
+  };
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_S7541902> plugin_CDF_2008_S7541902;
+
+}

Copied: trunk/src/Analyses/CDF_2008_S7782535.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_S7782535.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_S7782535.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_S7782535.cc)
@@ -0,0 +1,150 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/JetShape.hh"
+
+namespace Rivet {
+
+
+  /// Implementation of CDF RunII b-jet shape paper
+  ///
+  /// Fills integrated jet shape Psi(r/R) profiles for central b-jets in
+  /// four jet-pT bins, and a summary 1 - Psi data point set per pT bin.
+  class CDF_2008_S7782535 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    /// Registers a vetoed final state (no neutrinos; no muons above 1 GeV)
+    /// for CDF midpoint cone jets (R = 0.7), plus a JetShape projection
+    /// evaluated around the axes collected per event in _jetaxes.
+    CDF_2008_S7782535()
+      : Analysis("CDF_2008_S7782535"),
+        _Rjet(0.7) , _NpTbins(4)
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      const FinalState fs(-3.6, 3.6);
+      addProjection(fs, "FS");
+      // Veto (anti)neutrinos, and muons with pT above 1.0 GeV
+      VetoedFinalState vfs(fs);
+      vfs
+        .addVetoPairId(NU_E)
+        .addVetoPairId(NU_MU)
+        .addVetoPairId(NU_TAU)
+        .addVetoDetail(MUON, 1.0*GeV, MAXDOUBLE);
+      addProjection(vfs, "VFS");
+      addProjection(FastJets(vfs, FastJets::CDFMIDPOINT, 0.7), "Jets");
+      addProjection(JetShape(vfs, _jetaxes, 0.0, 0.7, 0.1, 0.3), "JetShape");
+      
+      // We don't attempt to model the following cuts:
+      //  * missing ET significance
+      //  * veto on additional vertices
+      //  * Z_vtx < 50 cm
+    }
+    
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book one Psi profile per pT bin plus the 1-Psi summary data point
+    /// set.  The pT bin edges are appended via boost.assign's operator+=.
+    void init() {
+      _pTbins += 52, 80, 104, 142, 300;
+      // Book histograms
+      for (int i = 0; i < _NpTbins; ++i) {
+        _h_Psi_pT[i] = bookProfile1D(i+1, 2, 1);
+      }
+      _h_OneMinusPsi_vs_pT = bookDataPointSet(5, 1, 1);
+    }  
+    
+    
+    // Do the analysis
+    void analyze(const Event& event) {
+      // Get jets     
+      const Jets& jets = applyProjection<FastJets>(event, "Jets").jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jets.size() << endl;
+      
+      // Determine the central jet axes
+      // Keep only b-jets above the lowest pT bin edge with |y| < 0.7
+      _jetaxes.clear();
+      foreach (const Jet& j, jets) {
+        if (j.containsBottom()) {
+          // Only use central calorimeter jets
+          FourMomentum pjet = j.momentum();
+          if (pjet.pT()/GeV > _pTbins[0] && fabs(pjet.rapidity()) < 0.7) {
+            _jetaxes.push_back(pjet);
+          }
+        }
+      }
+      if (_jetaxes.empty())  {
+        getLog() << Log::DEBUG << "No b-jet axes in acceptance" << endl;
+        vetoEvent;
+      }
+      
+      // Determine jet shapes
+      const JetShape& js = applyProjection<JetShape>(event, "JetShape");
+      
+      /// @todo Replace with foreach
+      for (size_t jind = 0; jind < _jetaxes.size(); ++jind) {
+        // Put jet in correct pT bin
+        // NOTE(review): the hard-coded 4 below duplicates _NpTbins --
+        // keep the two in sync.
+        int jet_pt_bin = -1;
+        for (size_t i = 0; i < 4; ++i) {
+          if (inRange(_jetaxes[jind].pT(), _pTbins[i], _pTbins[i+1])) {
+            jet_pt_bin = i;
+            break;
+          }
+        }
+        if (jet_pt_bin > -1) {
+          // Fill each entry in profile
+          for (size_t rbin = 0; rbin < js.numBins(); ++rbin) {
+            const double rad_Psi = js.rMin() + (rbin+1.0)*js.interval();
+            /// @todo Yuck... JetShape's interface sucks
+            _h_Psi_pT[jet_pt_bin]->fill(rad_Psi/_Rjet, js.intJetShape(jind, rbin), event.weight() );
+          }
+        }
+      }
+      
+    }
+    
+    
+    /// Finalize
+    /// Extract 1 - Psi from profile bin 1 of each pT bin and store it in
+    /// the summary data point set (y-coordinate only).
+    void finalize() {  
+      vector<double> y, ey;
+      for (size_t i = 0; i < _pTbins.size()-1; ++i) {
+        // Get entry for rad_Psi = 0.2 bin
+        AIDA::IProfile1D* ph_i = _h_Psi_pT[i];
+        y.push_back(1.0 - ph_i->binHeight(1));
+        ey.push_back(ph_i->binError(1));
+      }
+      _h_OneMinusPsi_vs_pT->setCoordinate(1, y, ey); 
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Analysis data
+    //@{
+    /// Jet axes fed to the JetShape projection (refilled every event)
+    vector<FourMomentum> _jetaxes;
+    /// Cone radius of the jet algorithm
+    double _Rjet;
+    /// pT bin edges
+    vector<double> _pTbins;
+    /// Number of pT bins
+    int _NpTbins;
+    //@}
+
+
+    /// @name Histograms
+    //@{
+    AIDA::IProfile1D* _h_Psi_pT[4];
+    AIDA::IDataPointSet* _h_OneMinusPsi_vs_pT;
+    //@}
+
+  };
+
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_S7782535> plugin_CDF_2008_S7782535;
+
+}

Copied: trunk/src/Analyses/CDF_2008_S7828950.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_S7828950.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_S7828950.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_S7828950.cc)
@@ -0,0 +1,101 @@
+// -*- C++ -*-
+// CDF Run II inclusive jet cross-section using the midpoint algorithm.
+
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/BinnedHistogram.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /// CDF Run II inclusive jet cross-section using the Midpoint algorithm.
+  /// The analysis includes 1.1fb^-1 of CDF data and is the first with a 
+  /// cone algorithm to include the forward region of the detector.
+  /// arXiv:0807.2204 to be published in PRD
+  class CDF_2008_S7828950 : public Analysis {
+  public:
+    
+    /// @name Constructors etc.
+    //@{
+    
+    /// Constructor
+    /// Registers midpoint cone jets (R = 0.7, 62 GeV threshold) built on
+    /// the full final state.
+    CDF_2008_S7828950()
+      : Analysis("CDF_2008_S7828950")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      //setSqrtS(1960*GeV);
+      const FinalState fs;
+      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.7, 62.0*GeV), "JetsM07");
+      setNeedsCrossSection(true);
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    // Book histos and set counters for number of events passed in each one
+    void init() {
+      /// @todo What actually are these histos showing?
+      _binnedHistosR07.addHistogram(  0, 0.1, bookHistogram1D(1, 1, 1));
+      _binnedHistosR07.addHistogram(0.1, 0.7, bookHistogram1D(2, 1, 1));
+      _binnedHistosR07.addHistogram(0.7, 1.1, bookHistogram1D(3, 1, 1));
+      _binnedHistosR07.addHistogram(1.1, 1.6, bookHistogram1D(4, 1, 1));
+      _binnedHistosR07.addHistogram(1.6, 2.1, bookHistogram1D(5, 1, 1));
+
+      // Cache each histogram's |y| bin width for the finalize() scaling.
+      // NOTE(review): the factor 2 presumably folds the +y and -y
+      // hemispheres into |y| -- confirm against the paper's normalisation.
+      size_t yind = 0;
+      foreach (AIDA::IHistogram1D* hist, _binnedHistosR07.getHistograms()) {
+        _yBinWidths[hist] = 2.0 * (_ybins[yind+1]-_ybins[yind]);
+        ++yind;
+      }
+    }
+
+
+    // Do the analysis: fill the histogram of each jet's |y| bin with its pT
+    void analyze(const Event& event) {
+      const double weight = event.weight();    
+      foreach (const Jet& jet, applyProjection<FastJets>(event, "JetsM07").jets()) {
+        _binnedHistosR07.fill(fabs(jet.momentum().rapidity()), jet.momentum().pT(), weight);
+      }
+    }  
+
+
+    // Normalise histograms to cross-section (nb), divided by |y| bin width
+    void finalize() {
+      foreach (AIDA::IHistogram1D* hist, _binnedHistosR07.getHistograms()) {
+        scale(hist, crossSection()/nanobarn/sumOfWeights()/_yBinWidths[hist]);
+      }
+    }
+
+    //@}
+
+
+  private:
+    
+    /// @name Histograms
+    //@{
+
+    /// The y bin width of each histogram
+    map<AIDA::IHistogram1D*, double> _yBinWidths;
+
+    /// The y bin edge values
+    /// @todo Yuck!
+    static const double _ybins[6];
+
+    /// Histograms in different eta regions
+    BinnedHistogram<double> _binnedHistosR07;
+
+  };
+
+
+  // Initialise static
+  const double CDF_2008_S7828950::_ybins[] = { 0.0, 0.1, 0.7, 1.1, 1.6, 2.1 };
+
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_S7828950> plugin_CDF_2008_S7828950;
+
+}

Copied: trunk/src/Analyses/CDF_2008_S8093652.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_S8093652.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_S8093652.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_S8093652.cc)
@@ -0,0 +1,82 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/BinnedHistogram.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+  /// @brief CDF dijet mass spectrum
+  ///
+  /// Fills the invariant mass of the two leading midpoint cone jets
+  /// (R = 0.7), requiring both within |y| < 1.0, and normalises the
+  /// spectrum to the measured cross section.
+  class CDF_2008_S8093652 : public Analysis {
+
+  public:
+
+    /// @name Construction
+    //@{
+
+    /// Constructor: register the midpoint cone jet finder on the full
+    /// final state.
+    CDF_2008_S8093652()
+      : Analysis("CDF_2008_S8093652")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      FinalState fs;
+      FastJets conefinder(fs, FastJets::CDFMIDPOINT, 0.7);
+      addProjection(conefinder, "ConeFinder");
+    } 
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{ 
+    
+    /// Book histograms
+    void init() {
+      _h_m_dijet = bookHistogram1D(1, 1, 1);
+    }
+    
+
+    /// Do the analysis 
+    void analyze(const Event & e) {
+      const double weight = e.weight();
+      
+      const JetAlg& jetpro = applyProjection<JetAlg>(e, "ConeFinder");
+      const Jets& jets = jetpro.jetsByPt();
+      
+      // Require at least two jets
+      if (jets.size() < 2) vetoEvent;
+      
+      // Both leading jets must be central: |y| < 1.0
+      const FourMomentum j0(jets[0].momentum());
+      const FourMomentum j1(jets[1].momentum());
+      if (fabs(j1.rapidity()) > 1.0 || fabs(j0.rapidity()) > 1.0) {
+        vetoEvent;
+      }
+    
+      // NOTE(review): mjj is filled without a /GeV conversion -- confirm
+      // this matches the reference histogram's binning units.
+      double mjj = FourMomentum(j0+j1).mass();
+      _h_m_dijet->fill(mjj, weight);
+    }
+    
+    
+    /// Finalize: normalise to the measured cross section
+    void finalize() {
+      scale(_h_m_dijet, crossSection()/sumOfWeights());
+    }
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _h_m_dijet;
+    //@}
+    
+  };
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_S8093652> plugin_CDF_2008_S8093652;
+
+}

Copied: trunk/src/Analyses/CDF_2008_S8095620.cc (from r1802, trunk/src/Analyses/CDF/CDF_2008_S8095620.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2008_S8095620.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2008_S8095620.cc)
@@ -0,0 +1,167 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/InvMassFinalState.hh"
+
+namespace Rivet {
+
+
+
+  /// Implementation of CDF Run II Z+b-jet cross section paper
+  ///
+  /// Selects a Z -> ee or mumu candidate (76 < m < 106 GeV), counts jets
+  /// and b-jets (a jet is b-tagged when a generator b-quark lies within
+  /// dR <= _Rjet of its axis), and fills b-jet Et/eta spectra plus jet
+  /// multiplicity and Z pT distributions, all normalised to the inclusive
+  /// Z selection.
+  class CDF_2008_S8095620 : public Analysis {
+  public:
+    
+    /// @name Constructors etc.
+    //@{
+    
+    /// Constructor.
+    /// jet cuts: |eta| <= 1.5
+    CDF_2008_S8095620()
+      : Analysis("CDF_2008_S8095620"), 
+        _Rjet(0.7), _JetPtCut(20.), _JetEtaCut(1.5),  
+        _sumWeightSelected(0.0)
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      
+      const FinalState fs(-3.6, 3.6);
+      addProjection(fs, "FS");
+      
+      // Create a final state with any e+e- or mu+mu- pair with 
+      // invariant mass 76 -> 106 GeV and ET > 20 (Z decay products)
+      vector<pair<long,long> > vids;
+      vids.push_back(make_pair(ELECTRON, POSITRON));
+      vids.push_back(make_pair(MUON, ANTIMUON));
+      FinalState fs2(-3.6, 3.6);
+      InvMassFinalState invfs(fs2, vids, 76*GeV, 106*GeV);
+      addProjection(invfs, "INVFS");
+      // Make a final state without the Z decay products for jet clustering
+      VetoedFinalState vfs(fs);
+      vfs.addVetoOnThisFinalState(invfs);
+      addProjection(vfs, "VFS");
+      addProjection(FastJets(vfs, FastJets::CDFMIDPOINT, 0.7), "Jets");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      // Book histograms
+      _dSdET    = bookHistogram1D(1, 1, 1);
+      _dSdETA   = bookHistogram1D(2, 1, 1);
+      _dSdNJet  = bookHistogram1D(3, 1, 1);
+      _dSdNbJet = bookHistogram1D(4, 1, 1);
+      _dSdZpT   = bookHistogram1D(5, 1, 1);
+    }  
+    
+   
+    // Do the analysis
+    void analyze(const Event& event) {
+      // Check we have an l+l- pair that passes the kinematic cuts
+      // Get the Z decay products (mu+mu- or e+e- pair)
+      const InvMassFinalState& invMassFinalState = applyProjection<InvMassFinalState>(event, "INVFS");
+      const ParticleVector&  ZDecayProducts =  invMassFinalState.particles();
+      
+      // make sure we have 2 Z decay products (mumu or ee) 
+      if (ZDecayProducts.size() < 2) vetoEvent;
+      // Every event passing the Z selection contributes to the
+      // normalisation, independent of any jet requirement.
+      _sumWeightSelected += event.weight(); 
+      // @todo: write out a warning if there are more than two decay products
+      FourMomentum Zmom = ZDecayProducts[0].momentum() +  ZDecayProducts[1].momentum();
+      
+      // Put all b-quarks in a vector
+      ParticleVector bquarks;
+      foreach (const GenParticle* p, particles(event.genEvent())) {
+        if (fabs(p->pdg_id()) == BQUARK) {
+          bquarks += Particle(*p);
+        }
+      }
+      
+      // Get jets 
+      const FastJets& jetpro = applyProjection<FastJets>(event, "Jets");
+      getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jetpro.size() << endl;
+      
+      const PseudoJets& jets = jetpro.pseudoJetsByPt();
+      getLog() << Log::DEBUG << "jetlist size = " << jets.size() << endl;
+      
+      int numBJet = 0;
+      int numJet  = 0;
+      // for each b-jet plot the ET and the eta of the jet, normalise to the total cross section at the end
+      // for each event plot N jet and pT(Z), normalise to the total cross section at the end 
+      for (PseudoJets::const_iterator jt = jets.begin(); jt != jets.end(); ++jt) {
+        // select jets that pass the kinematic cuts
+        if (jt->perp() > _JetPtCut && fabs(jt->rapidity()) <= _JetEtaCut) {
+          numJet++;
+          // does the jet contain a b-quark?
+          // (b-tag by angular matching to any generator-level b-quark)
+          bool bjet = false;
+          foreach (const Particle& bquark,  bquarks) {
+            if (deltaR(jt->rapidity(), jt->phi(), bquark.momentum().rapidity(),bquark.momentum().azimuthalAngle()) <= _Rjet) {
+              bjet = true;
+              break;
+            }
+          } // end loop around b-jets
+          if (bjet) {
+            numBJet++;
+            _dSdET->fill(jt->perp(),event.weight()); 
+            _dSdETA->fill(jt->rapidity(),event.weight()); 
+          }
+        }
+      } // end loop around jets
+      
+      if(numJet > 0) _dSdNJet->fill(numJet,event.weight());
+      if(numBJet > 0) {
+        _dSdNbJet->fill(numBJet,event.weight());
+        _dSdZpT->fill(Zmom.pT(),event.weight()); 
+      } 
+    }
+    
+  
+
+    // Finalize
+    void finalize() {  
+      // normalise histograms
+      // scale by 1 / the sum-of-weights of events that pass the Z cuts
+      // since the cross sections are normalized to the inclusive
+      // Z cross sections. 
+      double Scale = 1.0;
+      if (_sumWeightSelected != 0.0) Scale = 1.0/_sumWeightSelected;
+      _dSdET->scale(Scale);
+      _dSdETA->scale(Scale);
+      _dSdNJet->scale(Scale);
+      _dSdNbJet->scale(Scale);
+      _dSdZpT->scale(Scale);
+    }
+
+    //@}
+
+
+  private:
+
+    /// Cone radius, also used for the b-tag dR matching
+    double _Rjet;
+    /// Jet pT cut
+    double _JetPtCut;
+    /// Jet rapidity acceptance
+    double _JetEtaCut;
+    /// Sum of weights of events passing the Z selection (normalisation)
+    double _sumWeightSelected;
+ 
+    //@{
+    /// Histograms
+    AIDA::IHistogram1D* _dSdET;
+    AIDA::IHistogram1D* _dSdETA;
+    AIDA::IHistogram1D* _dSdNJet; 
+    AIDA::IHistogram1D* _dSdNbJet; 
+    AIDA::IHistogram1D* _dSdZpT; 
+
+    //@}
+
+  };
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2008_S8095620> plugin_CDF_2008_S8095620;
+
+}

Copied: trunk/src/Analyses/CDF_2009_S8057893.cc (from r1802, trunk/src/Analyses/CDF/CDF_2009_S8057893.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2009_S8057893.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2009_S8057893.cc)
@@ -0,0 +1,82 @@
+#include "Rivet/Rivet.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Analyses/CDF_2009_S8057893.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /// Constructor: set Tevatron beams and book the projections needed
+  /// for jet finding (final state + CDF midpoint cone jets, R = 1.0).
+  CDF_2009_S8057893::CDF_2009_S8057893() 
+    : Analysis("CDF_2009_S8057893")
+  {
+    // Run II p-pbar collisions
+    setBeams(PROTON, ANTIPROTON);
+    // Wide final state for jet input: |eta| < 4, no pT cut
+    const FinalState jetinput(-4.0, 4.0, 0.0*GeV);
+    addProjection(jetinput, "FSJ");
+    // CDF midpoint cone jets with cone radius 1.0
+    const FastJets conejets(jetinput, FastJets::CDFMIDPOINT, 1.0);
+    addProjection(conejets, "Jets");
+  }
+
+
+  /// Book histograms. Intentionally empty for now: the analysis is
+  /// incomplete, pending clarifications from the paper authors (see the
+  /// list of open questions in analyze()).
+  void CDF_2009_S8057893::init() {
+
+
+  }
+
+
+  /// Per-event analysis. Currently only requires >= 1 jet; the physics
+  /// content is unfinished pending answers from the paper authors (see
+  /// the question list below).
+  void CDF_2009_S8057893::analyze(const Event& event) {
+
+    // Fixed: the projection was registered as "Jets" in the constructor
+    // (not "MidpointJets"), and the Event parameter is named 'event'
+    // (the original code referenced an undeclared 'e').
+    const FastJets& jetpro = applyProjection<FastJets>(event, "Jets");
+    const Jets& jets = jetpro.jetsByPt();
+    getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
+    if (jets.empty()) {
+      getLog() << Log::DEBUG << "Failed jet multiplicity cut" << endl;
+      vetoEvent;
+    }
+
+    // Email sent to authors:
+    // Okay, so here are the questions:
+
+    //  * What |eta| and pT_min acceptance cuts were used?
+    //  * Is the "cone algorithm" JETCLU or MIDPOINT? You refer to the old 1992 paper that defines 
+    //    JETCLU, but I thought Run II analyses were using the more IRC-safe midpoint algorithm.
+    //  * Effective min j1, j2 Et values?
+    //  * Definition of "require the two leading jets to be well-balanced in Et"?
+    //  * Definition of the complementary cones: per-jet for j1, j2? Otherwise, what is defn of 
+    //    "dijet axis" (since the two jet axes will not exactly match due to ISR and extra jets.) 
+    //    Complementary cones are same eta as jet, but phi +- 90 degrees? Radius of compl. cones 
+    //    = 1.0? Or defined in theta_c (not Lorentz invariant)?
+    //  * kT of tracks rel to jet axis for all jets, j1 & j2, or just j1?
+
+    // Herwig missing from plots!
+    // Data tables? More dijet mass bins (only 3 are shown, but 8 are mentioned)
+
+
+    // Only use tracks with kT > 0.3 GeV
+
+    // Low histo limit: kT_jet > 0.5 GeV
+
+    // Opening cone theta_c = 0.5 rad (in dijet rest frame)
+
+    //  95 < Mjj < 132 GeV
+    // 243 < Mjj < 323 GeV
+    // 428 < Mjj < 563 GeV
+    // < Mjj < GeV
+    // < Mjj < GeV
+    // < Mjj < GeV
+    // < Mjj < GeV
+    // < Mjj < GeV
+  }
+
+
+  /// Finalize: nothing to do — no histograms are booked yet.
+  void CDF_2009_S8057893::finalize() {
+
+  }
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2009_S8057893> plugin_CDF_2009_S8057893;
+
+}

Copied: trunk/src/Analyses/CDF_2009_S8233977.cc (from r1802, trunk/src/Analyses/CDF/CDF_2009_S8233977.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/CDF_2009_S8233977.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/CDF/CDF_2009_S8233977.cc)
@@ -0,0 +1,133 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+
+  /* @brief CDF Run II min-bias cross-section
+   * @author Hendrik Hoeth
+   * 
+   * Measurement of \f$ \langle p_T \rangle \f$ vs. \f$ n_\text{ch} \f$,
+   * the track \f$ p_T \f$ distribution, and the \f$ \sum E_T \f$ distribution.
+   * Particles are selected within |eta|<1 and with pT>0.4 GeV.
+   * There is no pT cut for the \f$ \sum E_T \f$ measurement.
+   * 
+   * @par Run conditions
+   * 
+   * @arg \f$ \sqrt{s} = \f$ 1960 GeV
+   * @arg Run with generic QCD events.
+   * @arg Set particles with c*tau > 10 mm stable
+   * 
+   */ 
+  class CDF_2009_S8233977 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: cuts on final state are \f$ -1 < \eta < 1 \f$ 
+    /// and \f$ p_T > 0.4 \f$ GeV.
+    CDF_2009_S8233977()
+      : Analysis("CDF_2009_S8233977")
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      // "FS" has no pT cut and feeds the sum(ET) measurement;
+      // "CFS" (charged, pT > 0.4 GeV) feeds the track observables.
+      const FinalState fs(-1.0, 1.0, 0.0*GeV);
+      const ChargedFinalState cfs(-1.0, 1.0, 0.4*GeV);
+      addProjection(fs, "FS");
+      addProjection(cfs, "CFS");
+      setNeedsCrossSection(true);
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms: <pT> vs multiplicity profile, track pT spectrum,
+    /// and sum(ET) distribution (HepData histogram IDs 1-3).
+    void init() {
+      _hist_pt_vs_multiplicity = bookProfile1D(1, 1, 1);
+      _hist_pt                 = bookHistogram1D(2, 1, 1);
+      _hist_sumEt              = bookHistogram1D(3, 1, 1);
+    }
+
+
+    
+    /// Do the analysis
+    void analyze(const Event& e) {
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
+      const size_t numParticles = cfs.particles().size();
+
+      // Veto events with no charged particle in the acceptance.
+      // NOTE(review): the original comment claimed a cut of numCharged >= 2,
+      // but the code only requires >= 1 — confirm against the publication.
+      if (numParticles < 1) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+
+      // Get the event weight
+      const double weight = e.weight();
+      
+      foreach (const Particle& p, cfs.particles()) {
+        const double pT = p.momentum().pT() / GeV;
+        _hist_pt_vs_multiplicity->fill(numParticles, pT, weight);
+        
+        // The weight for entries in the pT distribution should be weight/(pT*dPhi*dy).
+        //
+        // - dPhi = 2*PI
+        //
+        // - dy depends on the pT: They calculate y assuming the particle has the
+        //   pion mass and assuming that eta=1:
+        //   dy = 2 * 1/2 * ln [(sqrt(m^2 + (a+1)*pT^2) + a*pT) / (sqrt(m^2 + (a+1)*pT^2) - a*pT)]
+        //   with a = sinh(1).
+        //
+        // sinh(1) = 1.1752012
+        // m(charged pion)^2 = (139.57 MeV)^2 = 0.019479785 GeV^2
+        
+        //// FIXME: The pT and sum(ET) distributions look slightly different from
+        ////        Niccolo's Monte Carlo plots. Still waiting for his answer.
+        const double sinh1 = 1.1752012;
+        const double apT  = sinh1 * pT;
+        const double mPi = 139.57*MeV;
+        const double root = sqrt(mPi*mPi + (1+sinh1)*pT*pT);
+        const double dy = std::log((root+apT)/(root-apT));
+        const double dphi = 2*M_PI;
+        _hist_pt->fill(pT, weight/(pT*dphi*dy));
+      }
+      // sum(ET) uses the *uncut* final state (no pT threshold).
+      double sumEt = 0.;
+      foreach (const Particle& p, fs.particles()) {
+        sumEt += p.momentum().Et();
+      }
+      _hist_sumEt->fill(sumEt, weight);
+    }
+
+    
+    
+    /// Normalize histos
+    void finalize() {
+      // dphi * deta = 2*PI * 2
+      //// FIXME: We are normalizing to the data instead of MC cross-section
+      //scale(_hist_sumEt, crossSection()/millibarn/(4*M_PI*sumOfWeights()));
+      //scale(_hist_pt, crossSection()/barn*1e-24/sumOfWeights());
+      // The constants below are the integrals of the published data
+      // distributions (data-driven normalisation, see FIXME above).
+      normalize(_hist_sumEt, 3.530);
+      normalize(_hist_pt, 2.513e-26);
+    }
+    
+    //@}
+
+
+  private:
+
+    // Profile of mean track pT vs charged multiplicity
+    AIDA::IProfile1D *_hist_pt_vs_multiplicity;
+    // Track pT spectrum, weighted to d^3(sigma)/(pT dpT dphi dy)
+    AIDA::IHistogram1D *_hist_pt;
+    // Scalar sum of transverse energies of all final-state particles
+    AIDA::IHistogram1D *_hist_sumEt;
+
+  };
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<CDF_2009_S8233977> plugin_CDF_2009_S8233977;
+
+}

Copied: trunk/src/Analyses/D0_1996_S3214044.cc (from r1802, trunk/src/Analyses/D0/D0_1996_S3214044.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_1996_S3214044.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_1996_S3214044.cc)
@@ -0,0 +1,272 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Math/LorentzTrans.hh"
+#include "Rivet/Math/Vector3.hh"
+#include "Rivet/Math/Units.hh"
+
+namespace Rivet {
+
+
+  /// @brief D0 topological distributions of 3- and 4-jet events.
+  /// Jets are D0 cone (R=0.7) jets with |eta| < 3, pairwise isolated by
+  /// dR > 1.4; events enter the 3-/4-jet analyses only if the leading
+  /// isolated jet has Et >= 60 GeV and the multi-jet mass exceeds 200 GeV.
+  class D0_1996_S3214044 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    D0_1996_S3214044() 
+      : Analysis("D0_1996_S3214044") 
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(false);
+      
+      const FinalState fs(-4.5, 4.5);
+      addProjection(fs, "FS");
+      /// @todo Use correct jet algorithm
+      addProjection(FastJets(fs, FastJets::D0ILCONE, 0.7, 20.0*GeV), "ConeJets");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms: 3-jet observables (HepData 1-7) and 4-jet
+    /// observables (HepData 8-29).
+    void init() {
+      
+      _h_3j_x3 = bookHistogram1D(1, 1, 1);
+      _h_3j_x5 = bookHistogram1D(2, 1, 1);
+      _h_3j_costheta3 = bookHistogram1D(3, 1, 1);
+      _h_3j_psi = bookHistogram1D(4, 1, 1);
+      _h_3j_mu34 = bookHistogram1D(5, 1, 1);
+      _h_3j_mu35 = bookHistogram1D(6, 1, 1);
+      _h_3j_mu45 = bookHistogram1D(7, 1, 1);
+      
+      _h_4j_x3 = bookHistogram1D(8, 1, 1);
+      _h_4j_x4 = bookHistogram1D(9, 1, 1);
+      _h_4j_x5 = bookHistogram1D(10, 1, 1);
+      _h_4j_x6 = bookHistogram1D(11, 1, 1);
+      _h_4j_costheta3 = bookHistogram1D(12, 1, 1);
+      _h_4j_costheta4 = bookHistogram1D(13, 1, 1);
+      _h_4j_costheta5 = bookHistogram1D(14, 1, 1);
+      _h_4j_costheta6 = bookHistogram1D(15, 1, 1);
+      _h_4j_cosomega34 = bookHistogram1D(16, 1, 1);
+      _h_4j_cosomega35 = bookHistogram1D(17, 1, 1);
+      _h_4j_cosomega36 = bookHistogram1D(18, 1, 1);
+      _h_4j_cosomega45 = bookHistogram1D(19, 1, 1);
+      _h_4j_cosomega46 = bookHistogram1D(20, 1, 1);
+      _h_4j_cosomega56 = bookHistogram1D(21, 1, 1);
+      _h_4j_mu34 = bookHistogram1D(22, 1, 1);
+      _h_4j_mu35 = bookHistogram1D(23, 1, 1);
+      _h_4j_mu36 = bookHistogram1D(24, 1, 1);
+      _h_4j_mu45 = bookHistogram1D(25, 1, 1);
+      _h_4j_mu46 = bookHistogram1D(26, 1, 1);
+      _h_4j_mu56 = bookHistogram1D(27, 1, 1);
+      _h_4j_theta_BZ = bookHistogram1D(28, 1, 1);
+      _h_4j_costheta_NR = bookHistogram1D(29, 1, 1);
+      
+    }
+    
+    
+    /// Event selection: central (|eta| < 3) cone jets, pairwise-isolated
+    /// by dR > 1.4, with leading jet Et >= 60 GeV.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      // Keep only jets within |eta| < 3 (Et-ordered from the projection).
+      Jets jets_in;
+      foreach (const Jet& jet, applyProjection<FastJets>(event, "ConeJets").jetsByEt()) {
+        if (fabs(jet.momentum().eta()) < 3.0) {
+          jets_in.push_back(jet);
+        }
+      }
+      
+      // Drop any jet that has another jet within dR < 1.4 (isolation).
+      Jets jets_isolated;
+      for (size_t i = 0; i < jets_in.size(); ++i) {
+        bool isolated=true;
+        for (size_t j = 0; j < jets_in.size(); ++j) {
+          if (i != j && deltaR(jets_in[i].momentum(), jets_in[j].momentum()) < 1.4) {
+            isolated = false;
+            break;
+          }
+        }
+        if (isolated) {
+          jets_isolated.push_back(jets_in[i]);
+        }
+      }
+      
+      // Leading isolated jet must carry Et >= 60 GeV.
+      if (jets_isolated.size() == 0 || jets_isolated[0].momentum().Et() < 60.0*GeV) {
+        vetoEvent;
+      }
+      
+      // An event with >= 4 jets enters *both* the 3-jet and 4-jet analyses.
+      if (jets_isolated.size() > 2) _threeJetAnalysis(jets_isolated, weight);
+      if (jets_isolated.size() > 3) _fourJetAnalysis(jets_isolated, weight);
+    }
+
+
+    /// All distributions are published unit-normalised.
+    void finalize() {
+      normalize(_h_3j_x3, 1.0);
+      normalize(_h_3j_x5, 1.0);
+      normalize(_h_3j_costheta3, 1.0);
+      normalize(_h_3j_psi, 1.0);
+      normalize(_h_3j_mu34, 1.0);
+      normalize(_h_3j_mu35, 1.0);
+      normalize(_h_3j_mu45, 1.0);
+      normalize(_h_4j_x3, 1.0);
+      normalize(_h_4j_x4, 1.0);
+      normalize(_h_4j_x5, 1.0);
+      normalize(_h_4j_x6, 1.0);
+      normalize(_h_4j_costheta3, 1.0);
+      normalize(_h_4j_costheta4, 1.0);
+      normalize(_h_4j_costheta5, 1.0);
+      normalize(_h_4j_costheta6, 1.0);
+      normalize(_h_4j_cosomega34, 1.0);
+      normalize(_h_4j_cosomega35, 1.0);
+      normalize(_h_4j_cosomega36, 1.0);
+      normalize(_h_4j_cosomega45, 1.0);
+      normalize(_h_4j_cosomega46, 1.0);
+      normalize(_h_4j_cosomega56, 1.0);
+      normalize(_h_4j_mu34, 1.0);
+      normalize(_h_4j_mu35, 1.0);
+      normalize(_h_4j_mu36, 1.0);
+      normalize(_h_4j_mu45, 1.0);
+      normalize(_h_4j_mu46, 1.0);
+      normalize(_h_4j_mu56, 1.0);
+      normalize(_h_4j_theta_BZ, 1.0);
+      normalize(_h_4j_costheta_NR, 1.0);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Helper functions
+    //@{
+    
+    /// Fill the 3-jet observables from the three leading jets, boosted to
+    /// their common CM frame and re-ordered by energy (p3 >= p4 >= p5).
+    void _threeJetAnalysis(const Jets& jets, const double& weight) {    
+      // >=3 jet events
+      FourMomentum jjj(jets[0].momentum()+jets[1].momentum()+jets[2].momentum());
+      const double sqrts = jjj.mass();
+      // Require a 3-jet invariant mass above 200 GeV.
+      if (sqrts<200*GeV) {
+        return;
+      }
+    
+      LorentzTransform cms_boost(-jjj.boostVector());
+      vector<FourMomentum> jets_boosted;
+      foreach (Jet jet, jets) {
+        jets_boosted.push_back(cms_boost.transform(jet.momentum()));
+      }
+      std::sort(jets_boosted.begin(), jets_boosted.end(), FourMomentum::byEDescending());
+      FourMomentum p3(jets_boosted[0]);
+      FourMomentum p4(jets_boosted[1]);
+      FourMomentum p5(jets_boosted[2]);
+      
+      // psi: angle between the (beam x leading-jet) plane and the
+      // (jet4 x jet5) plane in the 3-jet CM frame.
+      Vector3 beam1(0.0, 0.0, 1.0);
+      Vector3 p1xp3 = beam1.cross(p3.vector3());
+      Vector3 p4xp5 = p4.vector3().cross(p5.vector3());
+      const double cospsi = p1xp3.dot(p4xp5)/p1xp3.mod()/p4xp5.mod();
+      
+      // x_i = 2 E_i / sqrt(s): scaled jet energies.
+      _h_3j_x3->fill(2.0*p3.E()/sqrts, weight);
+      _h_3j_x5->fill(2.0*p5.E()/sqrts, weight);
+      _h_3j_costheta3->fill(fabs(cos(p3.theta())), weight);
+      _h_3j_psi->fill(acos(cospsi)/degree, weight);
+      // mu_ij = m_ij / sqrt(s): scaled pairwise masses.
+      _h_3j_mu34->fill(FourMomentum(p3+p4).mass()/sqrts, weight);
+      _h_3j_mu35->fill(FourMomentum(p3+p5).mass()/sqrts, weight);
+      _h_3j_mu45->fill(FourMomentum(p4+p5).mass()/sqrts, weight);
+    }
+    
+    
+    /// Fill the 4-jet observables from the four leading jets, boosted to
+    /// their common CM frame and re-ordered by energy (p3 >= ... >= p6).
+    void _fourJetAnalysis(const Jets& jets, const double& weight) {    
+      // >=4 jet events
+      FourMomentum jjjj(jets[0].momentum() + jets[1].momentum() + jets[2].momentum()+ jets[3].momentum());
+      const double sqrts = jjjj.mass();
+      // Require a 4-jet invariant mass above 200 GeV.
+      if (sqrts < 200*GeV) return;
+      
+      LorentzTransform cms_boost(-jjjj.boostVector());
+      vector<FourMomentum> jets_boosted;
+      foreach (Jet jet, jets) {
+        jets_boosted.push_back(cms_boost.transform(jet.momentum()));
+      }
+      sort(jets_boosted.begin(), jets_boosted.end(), FourMomentum::byEDescending());
+      FourMomentum p3(jets_boosted[0]);
+      FourMomentum p4(jets_boosted[1]);
+      FourMomentum p5(jets_boosted[2]);
+      FourMomentum p6(jets_boosted[3]);
+      
+      // Bengtsson-Zerwas angle: between the (p3,p4) and (p5,p6) planes.
+      Vector3 p3xp4 = p3.vector3().cross(p4.vector3());
+      Vector3 p5xp6 = p5.vector3().cross(p6.vector3());
+      const double costheta_BZ = p3xp4.dot(p5xp6)/p3xp4.mod()/p5xp6.mod();
+      // Nachtmann-Reiter angle: between (p3-p4) and (p5-p6).
+      const double costheta_NR = (p3.vector3()-p4.vector3()).dot(p5.vector3()-p6.vector3())/
+        (p3.vector3()-p4.vector3()).mod()/(p5.vector3()-p6.vector3()).mod();
+      
+      _h_4j_x3->fill(2.0*p3.E()/sqrts, weight);
+      _h_4j_x4->fill(2.0*p4.E()/sqrts, weight);
+      _h_4j_x5->fill(2.0*p5.E()/sqrts, weight);
+      _h_4j_x6->fill(2.0*p6.E()/sqrts, weight);
+      _h_4j_costheta3->fill(fabs(cos(p3.theta())), weight);
+      _h_4j_costheta4->fill(fabs(cos(p4.theta())), weight);
+      _h_4j_costheta5->fill(fabs(cos(p5.theta())), weight);
+      _h_4j_costheta6->fill(fabs(cos(p6.theta())), weight);
+      _h_4j_cosomega34->fill(cos(p3.angle(p4)), weight);
+      _h_4j_cosomega35->fill(cos(p3.angle(p5)), weight);
+      _h_4j_cosomega36->fill(cos(p3.angle(p6)), weight);
+      _h_4j_cosomega45->fill(cos(p4.angle(p5)), weight);
+      _h_4j_cosomega46->fill(cos(p4.angle(p6)), weight);
+      _h_4j_cosomega56->fill(cos(p5.angle(p6)), weight);
+      _h_4j_mu34->fill(FourMomentum(p3+p4).mass()/sqrts, weight);
+      _h_4j_mu35->fill(FourMomentum(p3+p5).mass()/sqrts, weight);
+      _h_4j_mu36->fill(FourMomentum(p3+p6).mass()/sqrts, weight);
+      _h_4j_mu45->fill(FourMomentum(p4+p5).mass()/sqrts, weight);
+      _h_4j_mu46->fill(FourMomentum(p4+p6).mass()/sqrts, weight);
+      _h_4j_mu56->fill(FourMomentum(p5+p6).mass()/sqrts, weight);
+      _h_4j_theta_BZ->fill(acos(costheta_BZ)/degree, weight);
+      _h_4j_costheta_NR->fill(costheta_NR, weight);
+      
+    }
+       
+
+  private:
+
+    /// @name Histograms
+    //@{
+
+    AIDA::IHistogram1D *_h_3j_x3;
+    AIDA::IHistogram1D *_h_3j_x5;
+    AIDA::IHistogram1D *_h_3j_costheta3;
+    AIDA::IHistogram1D *_h_3j_psi;
+    AIDA::IHistogram1D *_h_3j_mu34;
+    AIDA::IHistogram1D *_h_3j_mu35;
+    AIDA::IHistogram1D *_h_3j_mu45;
+    
+    AIDA::IHistogram1D *_h_4j_x3;
+    AIDA::IHistogram1D *_h_4j_x4;
+    AIDA::IHistogram1D *_h_4j_x5;
+    AIDA::IHistogram1D *_h_4j_x6;
+    AIDA::IHistogram1D *_h_4j_costheta3;
+    AIDA::IHistogram1D *_h_4j_costheta4;
+    AIDA::IHistogram1D *_h_4j_costheta5;
+    AIDA::IHistogram1D *_h_4j_costheta6;
+    AIDA::IHistogram1D *_h_4j_cosomega34;
+    AIDA::IHistogram1D *_h_4j_cosomega35;
+    AIDA::IHistogram1D *_h_4j_cosomega36;
+    AIDA::IHistogram1D *_h_4j_cosomega45;
+    AIDA::IHistogram1D *_h_4j_cosomega46;
+    AIDA::IHistogram1D *_h_4j_cosomega56;
+    AIDA::IHistogram1D *_h_4j_mu34;
+    AIDA::IHistogram1D *_h_4j_mu35;
+    AIDA::IHistogram1D *_h_4j_mu36;
+    AIDA::IHistogram1D *_h_4j_mu45;
+    AIDA::IHistogram1D *_h_4j_mu46;
+    AIDA::IHistogram1D *_h_4j_mu56;
+    AIDA::IHistogram1D *_h_4j_theta_BZ;
+    AIDA::IHistogram1D *_h_4j_costheta_NR;
+    //@}
+
+  }; 
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_1996_S3214044> plugin_D0_1996_S3214044;
+    
+}

Copied: trunk/src/Analyses/D0_1996_S3324664.cc (from r1802, trunk/src/Analyses/D0/D0_1996_S3324664.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_1996_S3324664.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_1996_S3324664.cc)
@@ -0,0 +1,119 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/BinnedHistogram.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/FinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief D0 azimuthal decorrelation of the most forward/backward jets.
+  /// Uses D0 cone (R=0.7) jets; the two extremal-rapidity jets define the
+  /// pseudorapidity gap deta and the azimuthal decorrelation dphi.
+  class D0_1996_S3324664 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    D0_1996_S3324664() : Analysis("D0_1996_S3324664") {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(false);
+      
+      const FinalState fs(-3.2, 3.2);
+      addProjection(fs, "FS");
+      /// @todo Use correct jet algorithm
+      addProjection(FastJets(fs, FastJets::D0ILCONE, 0.7, 20.0*GeV), "ConeJets");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms: deta spectrum, dphi in three deta slices, and the
+    /// <cos(pi-dphi)> vs deta profile.
+    void init() {
+      _h_deta = bookHistogram1D(1, 1, 1);
+      _h_dphi.addHistogram(0.0, 2.0, bookHistogram1D(2, 1, 1));
+      _h_dphi.addHistogram(2.0, 4.0, bookHistogram1D(2, 1, 2));
+      _h_dphi.addHistogram(4.0, 6.0, bookHistogram1D(2, 1, 3));
+      _h_cosdphi_deta = bookProfile1D(3, 1, 1);
+    }
+
+
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      // Keep only jets with |eta| < 3.
+      Jets jets;
+      foreach (const Jet& jet, applyProjection<FastJets>(event, "ConeJets").jets()) {
+        if (fabs(jet.momentum().eta()) < 3.0) {
+          jets.push_back(jet);
+        }
+      }
+      
+      if (jets.size() < 2) {
+        vetoEvent;
+      }
+    
+      // Find the extreme-eta (most backward and most forward) jets.
+      // Seeds are jets[0]/jets[1]; the loop below corrects both.
+      FourMomentum minjet = jets[0].momentum();
+      FourMomentum maxjet = jets[1].momentum();
+      double mineta = minjet.eta();
+      double maxeta = maxjet.eta();
+    
+      foreach(const Jet& jet, jets) {
+        double eta = jet.momentum().eta();
+        if (eta < mineta) {
+          minjet = jet.momentum();
+          mineta = eta;
+        }
+        else if (eta > maxeta) {
+          maxjet = jet.momentum();
+          maxeta = eta;
+        }
+      }
+      
+      // At least one of the two extremal jets must have Et > 50 GeV.
+      if (minjet.Et()<50*GeV && maxjet.Et()<50.0*GeV) {
+        vetoEvent;
+      }
+      
+      double deta = maxjet.eta()-minjet.eta();
+      double dphi = mapAngle0To2Pi(maxjet.phi()-minjet.phi());
+      
+      _h_deta->fill(deta, weight);
+      // NOTE(review): 1 - dphi/pi is negative for dphi > pi — confirm the
+      // intended observable folding against the publication.
+      _h_dphi.fill(deta, 1.0-dphi/M_PI, weight);
+      _h_cosdphi_deta->fill(deta, cos(M_PI-dphi), weight);
+      
+    }
+    
+    
+    void finalize() {
+      // Normalised to #events
+      normalize(_h_deta, 8830.0); 
+      
+      // I have no idea what this is normalised to... in the paper it says unity!
+      /// @todo Understand this!
+      foreach (IHistogram1D* histo, _h_dphi.getHistograms()) {
+        normalize(histo, 0.0798);
+      }
+      
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+
+    AIDA::IHistogram1D *_h_deta;
+    // dphi distributions binned in deta (three slices)
+    BinnedHistogram<double> _h_dphi;
+    AIDA::IProfile1D *_h_cosdphi_deta;
+    //@}
+
+  };
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_1996_S3324664> plugin_D0_1996_S3324664;
+
+
+}

Copied: trunk/src/Analyses/D0_2001_S4674421.cc (from r1802, trunk/src/Analyses/D0/D0_2001_S4674421.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2001_S4674421.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2001_S4674421.cc)
@@ -0,0 +1,196 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief D0 Run I differential W/Z boson cross-section analysis
+  /// @author Lars Sonnenschein
+  class D0_2001_S4674421 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor.
+    //  - @c _mwmz = ratio of \f$ mW/mZ \f$ used in the publication analysis
+    //  - @c _brwenu = ratio of \f$ BR(W->e,nu) \f$ used in the publication analysis
+    //  - @c _brzee = ratio of \f$ BR(Z->ee) \f$ used in the publication analysis
+    //  - @c _mZmin = lower Z mass cut used in the publication analysis
+    //  - @c _mZmax = upper Z mass cut used in the publication analysis
+    D0_2001_S4674421()
+      : Analysis("D0_2001_S4674421"),
+        _mwmz(0.8820), _brwenu(0.1073), _brzee(0.033632), 
+        _mZmin(75.*GeV), _mZmax(105.*GeV)
+    { 
+
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      //const FinalState fs(-3.0, 3.0); 
+      FinalState fs(-5.0, 5.0); //corrected for detector acceptance
+      addProjection(fs, "FS");
+
+      // Z -> e- e+
+      LeadingParticlesFinalState eeFS(fs, -2.5, 2.5, 0.); //20.);
+      eeFS.addParticleIdPair(ELECTRON);
+      addProjection(eeFS, "eeFS");
+      
+      // W- -> e- nu_e~
+      LeadingParticlesFinalState enuFS(fs, -2.5, 2.5, 0.); //25.);
+      enuFS.addParticleId(ELECTRON).addParticleId(NU_EBAR);
+      addProjection(enuFS, "enuFS");
+      
+      // W+ -> e+ nu_e
+      LeadingParticlesFinalState enubFS(fs, -2.5, 2.5, 0.); //25.);
+      enubFS.addParticleId(POSITRON).addParticleId(NU_E);
+      addProjection(enubFS, "enubFS");
+
+      // Remove neutrinos for isolation of final state particles
+      VetoedFinalState vfs(fs);
+      vfs.vetoNeutrinos();
+      addProjection(vfs, "VFS");
+      
+    }    
+    
+    
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms and reset the per-channel event-weight counters.
+    void init() {
+      _eventsFilledW = 0.0;
+      _eventsFilledZ = 0.0;
+      _h_dsigdpt_w = bookHistogram1D(1, 1, 1);
+      _h_dsigdpt_z = bookHistogram1D(1, 1, 2);
+
+      // Z pT is also histogrammed after scaling by mW/mZ, for the W/Z
+      // pT-shape ratio; bin edges follow the publication.
+      vector<double> bins(23);
+      bins += 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 160, 200;
+      _h_dsigdpt_scaled_z = bookHistogram1D("d01-x01-y03", bins);
+    }
+
+
+
+    /// Classify the event as Z -> ee (both leading leptons present and
+    /// m_ee in the Z window) or W -> e nu, and fill the boson pT histos.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+
+      const LeadingParticlesFinalState& eeFS = applyProjection<LeadingParticlesFinalState>(event, "eeFS");
+      if (eeFS.particles().size() == 2) {
+        // If there is a Z candidate:
+        // NOTE: static counter is for debug numbering only (not thread-safe).
+        static size_t Zcount = 0;
+        // Fill Z pT distributions
+        const ParticleVector& Zdaughters = eeFS.particles();
+        const FourMomentum pmom = Zdaughters[0].momentum() + Zdaughters[1].momentum();
+        double mass = sqrt(pmom.invariant());
+        if (mass/GeV > _mZmin && mass/GeV < _mZmax) {
+          ++Zcount;
+          _eventsFilledZ += weight;
+          getLog() << Log::DEBUG << "Z #" << Zcount << " pmom.pT() = " << pmom.pT()/GeV << " GeV" << endl;
+          _h_dsigdpt_z->fill(pmom.pT()/GeV, weight);
+          _h_dsigdpt_scaled_z->fill(pmom.pT()/GeV * _mwmz, weight);
+        }
+      } else { 
+        // There is no Z -> ee candidate... so this must be a W event, right?
+        const LeadingParticlesFinalState& enuFS = applyProjection<LeadingParticlesFinalState>(event, "enuFS");
+        const LeadingParticlesFinalState& enubFS = applyProjection<LeadingParticlesFinalState>(event, "enubFS"); 
+        static size_t Wcount = 0;
+
+        // Fill W pT distributions: require exactly one charge hypothesis
+        // (e- nu~ or e+ nu) to have both daughters.
+        ParticleVector Wdaughters;
+        if (enuFS.particles().size() == 2 && enubFS.empty()) {
+          Wdaughters = enuFS.particles();
+        } else if (enuFS.empty() && enubFS.particles().size() == 2) {
+          Wdaughters = enubFS.particles();
+        }
+        if (! Wdaughters.empty()) {
+          assert(Wdaughters.size() == 2);
+          const FourMomentum pmom = Wdaughters[0].momentum() + Wdaughters[1].momentum();
+          ++Wcount;
+          _eventsFilledW += weight;
+          _h_dsigdpt_w->fill(pmom.pT()/GeV, weight);
+        }
+      }
+    }
+
+
+
+    /// Build the W/Z pT-shape ratio and normalise the pT spectra to the
+    /// per-channel cross-sections.
+    void finalize() { 
+      // Get cross-section per event (i.e. per unit weight) from generator
+      const double xSecPerEvent = crossSection()/picobarn / sumOfWeights();
+
+      // Correct W pT distribution to W cross-section
+      const double xSecW = xSecPerEvent * _eventsFilledW;
+
+      // Correct Z pT distribution to Z cross-section
+      const double xSecZ = xSecPerEvent * _eventsFilledZ;
+
+      // Get W and Z pT integrals
+      const double wpt_integral = integral(_h_dsigdpt_w);
+      const double zpt_scaled_integral = integral(_h_dsigdpt_scaled_z);
+
+      // Divide and scale ratio histos
+      AIDA::IDataPointSet* div = histogramFactory().divide(histoDir() + "/d02-x01-y01", *_h_dsigdpt_w, *_h_dsigdpt_scaled_z); 
+      div->setTitle("$[\\mathrm{d}\\sigma/\\mathrm{d}p_\\perp(W)] / [\\mathrm{d}\\sigma/\\mathrm{d}(p_\\perp(Z) \\cdot M_W/M_Z)]$");
+      if (xSecW == 0 || wpt_integral == 0 || xSecZ == 0 || zpt_scaled_integral == 0) {
+        getLog() << Log::WARN << "Not filling ratio plot because input histos are empty" << endl;
+      } else {
+        // Scale factor converts event counts to cross-sections, and inverts the
+        // branching ratios since only one decay channel has been analysed for each boson.
+        const double scalefactor = (xSecW / wpt_integral) / (xSecZ / zpt_scaled_integral) * (_brzee / _brwenu);
+        for (int pt = 0; pt < div->size(); ++pt) {
+          assert(div->point(pt)->dimension() == 2);
+          AIDA::IMeasurement* m = div->point(pt)->coordinate(1);
+          m->setValue(m->value() * scalefactor);
+          m->setErrorPlus(m->errorPlus() * scalefactor);
+          // BUGFIX: scale errorMinus from its own value. The previous code
+          // read errorPlus() *after* it had already been rescaled, so the
+          // minus error was set to errorPlus * scalefactor^2.
+          m->setErrorMinus(m->errorMinus() * scalefactor);
+        }
+      }
+
+      // Normalize non-ratio histos
+      normalize(_h_dsigdpt_w, xSecW);
+      normalize(_h_dsigdpt_z, xSecZ);
+      normalize(_h_dsigdpt_scaled_z, xSecZ);
+
+    }
+
+
+    //@}
+    
+  private:
+    
+    /// Analysis used ratio of mW/mZ 
+    const double _mwmz;
+    
+    /// Ratio of \f$ BR(W->e,nu) \f$ used in the publication analysis
+    const double _brwenu;
+    
+    /// Ratio of \f$ \text{BR}( Z \to e^+ e^-) \f$ used in the publication analysis
+    const double _brzee;
+    
+    /// Invariant mass cuts for Z boson candidate (75 GeV < mZ < 105 GeV)
+    const double _mZmin, _mZmax;
+
+
+    // Event counters (weight sums) for cross section normalizations
+    double _eventsFilledW;
+    double _eventsFilledZ;
+    
+    //@{
+    /// Histograms
+    AIDA::IHistogram1D* _h_dsigdpt_w;
+    AIDA::IHistogram1D* _h_dsigdpt_z;
+    AIDA::IHistogram1D* _h_dsigdpt_scaled_z;
+   //@}    
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2001_S4674421> plugin_D0_2001_S4674421;
+
+}

Copied: trunk/src/Analyses/D0_2004_S5992206.cc (from r1802, trunk/src/Analyses/D0/D0_2004_S5992206.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2004_S5992206.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2004_S5992206.cc)
@@ -0,0 +1,142 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+
+namespace Rivet {
+
+
+  /* @brief D0 Run II jet analysis
+   * @author Lars Sonnenschein
+   * 
+   * Measurement of angular correlations in di-jet events.
+   * 
+   * 
+   * @par Run conditions
+   * 
+   * @arg \f$ \sqrt{s} = \f$ 1960 GeV
+   * @arg Run with generic QCD events.
+   * @arg Several \f$ p_\perp^\text{min} \f$ cutoffs are probably required to fill the histograms:
+   *   @arg \f$ p_\perp^\text{min} = \f$ 50, 75, 100, 150 GeV for the four pT ranges respecively
+   * 
+   */ 
+  class D0_2004_S5992206 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor.
+    D0_2004_S5992206() : Analysis("D0_2004_S5992206")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      const FinalState fs(-3.0, 3.0);
+      addProjection(fs, "FS");
+      addProjection(FastJets(fs, FastJets::D0ILCONE, 0.7, 6*GeV), "Jets");
+      addProjection(TotalVisibleMomentum(fs), "CalMET");
+      addProjection(PVertex(), "PV");
+      
+      // Veto neutrinos, and muons with pT above 1.0 GeV
+      // NOTE(review): the "VFS" projection is booked here but never applied
+      // in analyze() — confirm whether it is intentional or leftover.
+      VetoedFinalState vfs(fs);
+      vfs.vetoNeutrinos();
+      vfs.addVetoDetail(MUON, 1.0, MAXDOUBLE);
+      addProjection(vfs, "VFS");
+    }
+
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms: dijet delta-phi in four leading-jet pT bins.
+    void init() {
+      _histJetAzimuth_pTmax75_100  = bookHistogram1D(1, 2, 1);
+      _histJetAzimuth_pTmax100_130 = bookHistogram1D(2, 2, 1);
+      _histJetAzimuth_pTmax130_180 = bookHistogram1D(3, 2, 1);
+      _histJetAzimuth_pTmax180_    = bookHistogram1D(4, 2, 1);
+    }
+
+
+    /// Do the analysis
+    void analyze(const Event & event) {
+
+      // Analyse and print some info
+      const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
+      getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jetpro.size() << endl;
+      
+      // Find vertex and check  that its z-component is < 50 cm from the nominal IP
+      const PVertex& pv = applyProjection<PVertex>(event, "PV");
+      if (fabs(pv.position().z())/cm > 50.0) vetoEvent;
+      
+      // Require at least two jets with pT > 40 GeV.
+      const Jets jets  = jetpro.jetsByPt(40.0*GeV);
+      if (jets.size() >= 2) {
+        getLog() << Log::DEBUG << "Jet multiplicity after pT > 40 GeV cut = " << jets.size() << endl; 
+      } else {
+        vetoEvent;
+      }
+      // Both leading jets must be central: |y| < 0.5.
+      const double rap1 = jets[0].momentum().rapidity();
+      const double rap2 = jets[1].momentum().rapidity();
+      if (fabs(rap1) > 0.5 || fabs(rap2) > 0.5) {
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Jet eta and pT requirements fulfilled" << endl;
+      const double pT1 = jets[0].momentum().pT();
+      
+      // Reject events with large missing Et relative to the leading jet pT
+      // (suppresses mismeasured/calorimeter-noise events).
+      const TotalVisibleMomentum& caloMissEt = applyProjection<TotalVisibleMomentum>(event, "CalMET");
+      getLog() << Log::DEBUG << "Missing Et = " << caloMissEt.momentum().pT()/GeV << endl;
+      if (caloMissEt.momentum().pT() > 0.7*pT1) {
+        vetoEvent;
+      }
+      
+      // Fill the delta-phi histogram for the relevant leading-jet pT bin.
+      if (pT1/GeV >= 75.0) {
+        const double weight = event.weight();
+        const double dphi = deltaPhi(jets[0].momentum().phi(), jets[1].momentum().phi());
+        if (inRange(pT1/GeV, 75.0, 100.0)) {
+          _histJetAzimuth_pTmax75_100->fill(dphi, weight);
+        } else if (inRange(pT1/GeV, 100.0, 130.0)) {
+          _histJetAzimuth_pTmax100_130->fill(dphi, weight);
+        } else if (inRange(pT1/GeV, 130.0, 180.0)) {
+          _histJetAzimuth_pTmax130_180->fill(dphi, weight);
+        } else if (pT1/GeV > 180.0) {
+          _histJetAzimuth_pTmax180_->fill(dphi, weight);
+        }
+      }
+      
+    }
+    
+    
+    // Finalize
+    void finalize() { 
+      // Normalize histograms to unit area
+      normalize(_histJetAzimuth_pTmax75_100);
+      normalize(_histJetAzimuth_pTmax100_130);
+      normalize(_histJetAzimuth_pTmax130_180);
+      normalize(_histJetAzimuth_pTmax180_);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _histJetAzimuth_pTmax75_100;
+    AIDA::IHistogram1D* _histJetAzimuth_pTmax100_130;
+    AIDA::IHistogram1D* _histJetAzimuth_pTmax130_180;
+    AIDA::IHistogram1D* _histJetAzimuth_pTmax180_;
+    //@}
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2004_S5992206> plugin_D0_2004_S5992206;
+
+}

Copied: trunk/src/Analyses/D0_2006_S6438750.cc (from r1802, trunk/src/Analyses/D0/D0_2006_S6438750.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2006_S6438750.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2006_S6438750.cc)
@@ -0,0 +1,137 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  /// @brief Inclusive isolated photon cross-section, differential in \f$ p_\perp(gamma) \f$.
+  /// @author Andy Buckley
+  /// @author Gavin Hesketh
+  class D0_2006_S6438750 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Default constructor. Declares the final states used: a general FS in
+    /// |eta| < 1.5 for the isolation sum, and a leading-photon FS in |eta| < 1.0.
+    D0_2006_S6438750() : Analysis("D0_2006_S6438750")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      /// @todo Use cross-section from generator
+      setNeedsCrossSection(true);
+      
+      // General FS for photon isolation
+      FinalState fs(-1.5, 1.5);
+      addProjection(fs, "AllFS");
+      
+      // Get leading photon
+      LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
+      photonfs.addParticleId(PHOTON);
+      addProjection(photonfs, "LeadingPhoton");
+    } 
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book the single pT(gamma) histogram from the reference data (d01-x01-y01).
+    void init() {
+      _h_pTgamma = bookHistogram1D(1, 1, 1);
+    }
+    
+
+    /// Do the analysis: select an isolated leading photon with pT > 23 GeV and
+    /// |eta| < 0.9, then fill the differential pT spectrum.
+    void analyze(const Event& event) {
+
+      // Get the photon
+      const FinalState& photonfs = applyProjection<FinalState>(event, "LeadingPhoton");
+      if (photonfs.particles().size() != 1) {
+        getLog() << Log::DEBUG << "No photon found" << endl;
+        vetoEvent;
+      }
+      const FourMomentum photon = photonfs.particles().front().momentum();
+      if (photon.pT()/GeV < 23) {
+        getLog() << Log::DEBUG << "Leading photon has pT < 23 GeV: " << photon.pT()/GeV << endl;
+        vetoEvent;
+      }
+      
+      // Get all other particles
+      const FinalState& fs = applyProjection<FinalState>(event, "AllFS");
+      if (fs.empty()) {
+        vetoEvent;
+      }
+      
+      // Two-cone photon isolation: compare the energy in an inner cone R < 0.2
+      // (which contains the photon itself) with the energy in the hollow cone
+      // 0.2 < R < 0.4; the exact criteria are applied below.
+      const double egamma = photon.E();
+      // Energy inside R = 0.2
+      double econe_02 = 0.0;
+      // Energy between R = [0.2, 0.4]
+      double econe_02_04 = 0.0;
+      foreach (const Particle& p, fs.particles()) {
+        const double dr = deltaR(photon.pseudorapidity(), photon.azimuthalAngle(),
+                                 p.momentum().pseudorapidity(), p.momentum().azimuthalAngle());
+        if (dr < 0.2) {
+          econe_02 += p.momentum().E();
+        } else if (dr < 0.4) {
+          econe_02_04 += p.momentum().E();
+        }
+      }
+      // Veto if outer hollow cone contains more than 10% of the energy of the inner cone
+      // or if the non-photon energy in the inner cone exceeds 5% of the photon energy.
+      // (econe_02 >= egamma > 0 since the photon itself is inside the inner cone.)
+      if (econe_02_04/econe_02 > 0.1 || (econe_02-egamma)/egamma > 0.05) {
+        getLog() << Log::DEBUG << "Vetoing event because photon is insufficiently isolated" << endl;
+        vetoEvent;
+      }
+      
+      // Veto if leading jet is outside plotted rapidity regions
+      const double eta_gamma = fabs(photon.pseudorapidity());
+      if (eta_gamma > 0.9) {
+        getLog() << Log::DEBUG << "Leading photon falls outside acceptance range; "
+                 << "|eta_gamma| = " << eta_gamma << endl;
+        vetoEvent;
+      }
+      
+      // Fill histo
+      // NOTE(review): fills with photon.pT() in internal momentum units, while the
+      // cuts above use pT()/GeV — confirm the intended units of the histogram axis.
+      const double weight = event.weight();
+      _h_pTgamma->fill(photon.pT(), weight); 
+    }
+    
+    
+
+    /// Finalize: convert the weighted spectrum to a differential cross-section
+    /// using the generator cross-section and the rapidity bin width.
+    void finalize() {
+      /// @todo Generator cross-section from Pythia gives ~7500, vs. expected 2988!
+      //normalize(_h_pTgamma, 2988.4869);
+      
+      // Effective generated luminosity = sum(weights) / sigma_gen
+      const double lumi_gen = sumOfWeights()/crossSection();
+      // Divide by effective lumi, plus rapidity bin width of 1.8
+      scale(_h_pTgamma, 1/lumi_gen * 1/1.8);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // Differential cross-section vs. photon pT
+    AIDA::IHistogram1D* _h_pTgamma;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2006_S6438750> plugin_D0_2006_S6438750;
+
+}

Copied: trunk/src/Analyses/D0_2007_S7075677.cc (from r1802, trunk/src/Analyses/D0/D0_2007_S7075677.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2007_S7075677.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2007_S7075677.cc)
@@ -0,0 +1,88 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ZFinder.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of D0 Run II Z pT diff cross-section shape
+  /// @author Andy Buckley
+  /// @author Gavin Hesketh
+  /// @author Frank Siegert
+  class D0_2007_S7075677 : public Analysis {
+
+  public:
+
+    /// Default constructor. Sets up a ZFinder for e+e- pairs in the
+    /// 71--111 GeV mass window with a 15 GeV lepton pT cut.
+    D0_2007_S7075677() : Analysis("D0_2007_S7075677")  
+    {
+      // Run II Z rapidity
+      setBeams(PROTON, ANTIPROTON);
+      
+
+      /// @todo Ask Gavin Hesketh about his first implemention without eta cuts.
+      vector<pair<double, double> > etaRanges;
+      // Remove eta cuts for the moment, because it seems like they have been
+      // corrected for.
+      // etaRanges.push_back(make_pair(-3.2, -1.5));
+      // etaRanges.push_back(make_pair(-0.9, 0.9));
+      // etaRanges.push_back(make_pair(1.5, 3.2));
+      ZFinder zfinder(etaRanges, 15.0*GeV, ELECTRON, 71.0*GeV, 111.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+    }
+    
+
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book the |y(Z)| histogram from the reference data (d01-x01-y01).
+    void init() {
+      _h_yZ = bookHistogram1D(1, 1, 1);
+    }
+
+
+    /// Do the analysis: for events with a unique Z candidate where at least one
+    /// decay electron has pT > 25 GeV, fill the absolute Z rapidity.
+    void analyze(const Event & e) {
+      const double weight = e.weight();
+      
+      const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
+      if (zfinder.particles().size() == 1) {
+        // NOTE(review): assumes the constituents FS holds (at least) the two decay
+        // electrons whenever a unique Z is found — confirm against ZFinder's contract.
+        const ParticleVector& el(zfinder.constituentsFinalState().particles());
+        if (el[0].momentum().pT() > 25.0*GeV || el[1].momentum().pT() > 25.0*GeV) {
+          double yZ = fabs(zfinder.particles()[0].momentum().rapidity());
+          _h_yZ->fill(yZ, weight);
+        }
+      }
+      else {
+        getLog() << Log::DEBUG << "No unique lepton pair found." << endl;
+      }
+    }
+    
+    
+    /// Finalize: normalise the |y(Z)| shape.
+    void finalize() {
+      // Data seems to have been normalized for the avg of the two sides 
+      // (+ve & -ve rapidity) rather than the sum, hence the 0.5:
+      normalize(_h_yZ, 0.5);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // |y(Z)| differential cross-section shape
+    AIDA::IHistogram1D * _h_yZ;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2007_S7075677> plugin_D0_2007_S7075677;
+
+}

Copied: trunk/src/Analyses/D0_2008_S6879055.cc (from r1802, trunk/src/Analyses/D0/D0_2008_S6879055.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2008_S6879055.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2008_S6879055.cc)
@@ -0,0 +1,175 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/InvMassFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of the ratio sigma(Z/gamma* + n jets)/sigma(Z/gamma*)
+  class D0_2008_S6879055 : public Analysis {
+  public:
+    
+    /// Default constructor. Builds the projection chain: leading e+/e- pair in
+    /// the Z mass window, a vetoed FS (no Z electrons, no neutrinos) for jet
+    /// finding with the D0 ILCONE algorithm, and the primary vertex.
+    D0_2008_S6879055() : Analysis("D0_2008_S6879055")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      // Basic final state
+      FinalState fs(-5.0, 5.0);
+      addProjection(fs, "FS");
+      
+      // Leading electrons in tracking acceptance
+      LeadingParticlesFinalState lpfs(fs, -1.1, 1.1, 25*GeV);
+      lpfs.addParticleId(ELECTRON).addParticleId(POSITRON);
+      addProjection(lpfs, "LeadingElectronsFS");
+      
+      // Invariant mass selection around Z pole
+      InvMassFinalState electronsFromZ(lpfs, make_pair(ELECTRON, POSITRON), 75*GeV, 105*GeV);
+      addProjection(electronsFromZ,"ElectronsFromZ");
+      
+      // Vetoed FS for jets
+      VetoedFinalState vfs(fs);
+      // Add particle/antiparticle vetoing
+      vfs.vetoNeutrinos();
+      // Veto the electrons from Z decay  
+      vfs.addVetoOnThisFinalState(electronsFromZ);
+      addProjection(vfs, "JetFS");
+      
+      // Jet finder
+      FastJets jets(vfs, FastJets::D0ILCONE, 0.5, 20.0*GeV);
+      addProjection(jets, "Jets");
+      
+      // Vertex
+      PVertex vertex;
+      addProjection(vertex, "PrimaryVertex");
+    } 
+
+
+    /// @name Analysis methods
+    //@{ 
+    
+    /// Book histograms: the n-jet cross-section ratio and the pT spectra of the
+    /// three leading jets.
+    void init() {
+      _crossSectionRatio = bookHistogram1D(1, 1, 1);
+      _pTjet1 = bookHistogram1D(2, 1, 1);
+      _pTjet2 = bookHistogram1D(3, 1, 1);
+      _pTjet3 = bookHistogram1D(4, 1, 1);
+    }
+    
+    
+    
+    /// Do the analysis: select Z -> ee events with a good primary vertex, count
+    /// jets passing |eta| and electron-overlap cuts, and fill multiplicity bins
+    /// and jet pT spectra.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.empty()) {
+        vetoEvent;
+      }
+      
+      // Check that the primary vertex is within 60 cm in z from (0,0,0)
+      const PVertex& vertex = applyProjection<PVertex>(event, "PrimaryVertex");
+      getLog() << Log::DEBUG << "Primary vertex is at " << vertex.position()/cm << " cm" << endl;
+      if (fabs(vertex.position().z())/cm > 60) {
+        getLog() << Log::DEBUG << "Vertex z-position " << vertex.position().z()/cm << " is outside cuts" << endl;
+        vetoEvent;
+      }
+      
+      // Find the Z candidates
+      const InvMassFinalState& invmassfs = applyProjection<InvMassFinalState>(event, "ElectronsFromZ");
+      // If there is no Z candidate in the FinalState, skip the event
+      if (invmassfs.particles().size() != 2) {
+        getLog() << Log::DEBUG << "No Z candidate found" << endl;
+        vetoEvent;
+      }
+      
+      // Now build the list of jets on a FS without the electrons from Z
+      // Additional cuts on jets: |eta| < 2.5 and dR(j,leading electron) > 0.4
+      const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
+      const Jets jets = jetpro.jetsByPt(20.0*GeV);
+      vector<FourMomentum> finaljet_list;
+      foreach (const Jet& j, jets) {
+        const double jeta = j.momentum().pseudorapidity();
+        const double jphi = j.momentum().azimuthalAngle();
+        if (fabs(jeta) > 2.5) continue;
+        
+        // Reject jets overlapping either Z-decay electron within dR < 0.4
+        FourMomentum e0 = invmassfs.particles()[0].momentum();
+        FourMomentum e1 = invmassfs.particles()[1].momentum();
+        const double e0eta = e0.pseudorapidity();
+        const double e0phi = e0.azimuthalAngle();
+        if (deltaR(e0eta, e0phi, jeta, jphi) < 0.4) continue;
+        
+        const double e1eta = e1.pseudorapidity();
+        const double e1phi = e1.azimuthalAngle();
+        if (deltaR(e1eta, e1phi, jeta, jphi) < 0.4) continue;
+        
+        // If we pass all cuts...
+        finaljet_list.push_back(j.momentum());
+      }
+      getLog() << Log::DEBUG << "Num jets passing = " << finaljet_list.size() << endl;
+      
+      // For normalisation of crossSection data (includes events with no jets passing cuts)
+      _crossSectionRatio->fill(0, weight);
+      
+      // Fill jet pT and multiplicities (inclusive: an n-jet event fills bins 1..n, capped at 4)
+      if (finaljet_list.size() >= 1) {
+        _crossSectionRatio->fill(1, weight);
+        _pTjet1->fill(finaljet_list[0].pT(), weight);
+      }
+      if (finaljet_list.size() >= 2) {
+        _crossSectionRatio->fill(2, weight);
+        _pTjet2->fill(finaljet_list[1].pT(), weight);
+      }
+      if (finaljet_list.size() >= 3) {
+        _crossSectionRatio->fill(3, weight);
+        _pTjet3->fill(finaljet_list[2].pT(), weight);
+      }
+      if (finaljet_list.size() >= 4) {
+        _crossSectionRatio->fill(4, weight);
+      }
+    }
+    
+    
+    
+    /// Finalize: turn the multiplicity histogram into a ratio w.r.t. the
+    /// inclusive (0-jet) bin, and normalise the jet pT spectra to the data.
+    void finalize() {
+      // Now divide by the inclusive result
+      _crossSectionRatio->scale(1.0/_crossSectionRatio->binHeight(0));
+      
+      // Normalise jet pT's to integral of data
+      // there is no other way to do this, because these quantities are not
+      // detector corrected
+      normalize(_pTjet1, 10439.0);
+      normalize(_pTjet2, 1461.5);
+      normalize(_pTjet3, 217.0);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // sigma(Z + >= n jets) / sigma(Z), binned in n
+    AIDA::IHistogram1D * _crossSectionRatio;
+    // pT spectra of the three leading jets
+    AIDA::IHistogram1D * _pTjet1;
+    AIDA::IHistogram1D * _pTjet2;
+    AIDA::IHistogram1D * _pTjet3;
+    //@}
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2008_S6879055> plugin_D0_2008_S6879055;
+  
+}

Copied: trunk/src/Analyses/D0_2008_S7554427.cc (from r1802, trunk/src/Analyses/D0/D0_2008_S7554427.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2008_S7554427.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2008_S7554427.cc)
@@ -0,0 +1,88 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ZFinder.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of D0 Run II Z pT differential cross-section shape
+  /// @author Andy Buckley
+  /// @author Gavin Hesketh
+  /// @author Frank Siegert
+  class D0_2008_S7554427 : public Analysis {
+
+  public:
+
+    /// Default constructor. Uses a ZFinder with no rapidity restriction and no
+    /// lepton pT cut, in the 40--200 GeV mass window.
+    D0_2008_S7554427()
+      : Analysis("D0_2008_S7554427")
+    {
+      // Run II Z pT
+      setBeams(PROTON, ANTIPROTON);
+      
+      ZFinder zfinder(-MAXRAPIDITY, MAXRAPIDITY, 0.0*GeV, ELECTRON,
+                      40.0*GeV, 200.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+    } 
+    
+    
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book histograms: inclusive Z pT and the forward (|y| > 2) Z pT spectrum.
+    void init() {
+      _h_ZpT         = bookHistogram1D(1, 1, 1);
+      _h_forward_ZpT = bookHistogram1D(3, 1, 1);
+    }
+
+
+
+    /// Do the analysis: for events with a unique Z candidate, fill the Z pT
+    /// spectrum, and additionally the forward spectrum when |y(Z)| > 2.
+    void analyze(const Event & e) {
+      const double weight = e.weight();
+
+      const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
+      if (zfinder.particles().size() == 1) {
+        double yZ = fabs(zfinder.particles()[0].momentum().rapidity());
+        double pTZ = zfinder.particles()[0].momentum().pT();
+        _h_ZpT->fill(pTZ, weight);
+        if (yZ > 2.0) {
+          _h_forward_ZpT->fill(pTZ, weight);
+        }
+      }
+      else {
+        getLog() << Log::DEBUG << "no unique lepton pair found." << endl;
+      }
+      
+    }
+    
+    
+    
+    /// Finalize: normalise both spectra to unit area (shape measurement).
+    void finalize() {
+      normalize(_h_ZpT);
+      normalize(_h_forward_ZpT);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // Z pT spectrum, inclusive and for forward (|y| > 2) Z bosons
+    AIDA::IHistogram1D * _h_ZpT;
+    AIDA::IHistogram1D * _h_forward_ZpT;
+    //@}
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2008_S7554427> plugin_D0_2008_S7554427;
+
+}

Copied: trunk/src/Analyses/D0_2008_S7662670.cc (from r1802, trunk/src/Analyses/D0/D0_2008_S7662670.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2008_S7662670.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2008_S7662670.cc)
@@ -0,0 +1,136 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of D0 differential jet cross sections
+  /// @author Andy Buckley
+  /// @author Gavin Hesketh
+  class D0_2008_S7662670 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor. Sets up the full final state and a D0 ILCONE jet finder
+    /// (R = 0.7, 6 GeV minimum pT); the generator cross-section is required
+    /// for the absolute normalisation in finalize().
+    D0_2008_S7662670()
+      : Analysis("D0_2008_S7662670")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      // Full final state
+      FinalState fs(-5.0, 5.0);
+      addProjection(fs, "FS");
+      
+      FastJets jetpro(fs, FastJets::D0ILCONE, 0.7, 6*GeV);
+      addProjection(jetpro, "Jets");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book one d2sigma/dpT/dy histogram per |y| bin (six bins, 0.0--2.4).
+    void init() 
+    {
+      _h_dsigdptdy_y00_04 = bookHistogram1D(1, 1, 1);
+      _h_dsigdptdy_y04_08 = bookHistogram1D(2, 1, 1);
+      _h_dsigdptdy_y08_12 = bookHistogram1D(3, 1, 1);
+      _h_dsigdptdy_y12_16 = bookHistogram1D(4, 1, 1);
+      _h_dsigdptdy_y16_20 = bookHistogram1D(5, 1, 1);
+      _h_dsigdptdy_y20_24 = bookHistogram1D(6, 1, 1);
+    }
+    
+    
+    
+    /// Do the analysis: every jet with pT > 50 GeV fills the pT histogram of
+    /// its |y| bin (inclusive-jet measurement — one fill per jet, not per event).
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.empty()) {
+        getLog() << Log::DEBUG << "Empty event!" << endl;
+        vetoEvent;
+      }
+      
+      // Find the jets
+      const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
+      // If there are no jets, skip the event
+      if (jetpro.jets().size() == 0) {
+        getLog() << Log::DEBUG << "No jets found" << endl;
+        vetoEvent;
+      }
+
+      // Fill histo for each jet
+      foreach (const Jet& j, jetpro.jets()) {
+        const double pt = j.momentum().pT();
+        const double y = fabs(j.momentum().rapidity());
+        if (pt/GeV > 50) {
+          getLog() << Log::TRACE << "Filling histos: pT = " << pt/GeV 
+                   << ", |y| = " << y << endl;
+          if (y < 0.4) {
+            _h_dsigdptdy_y00_04->fill(pt/GeV, weight);
+          } else if (y < 0.8) {
+            _h_dsigdptdy_y04_08->fill(pt/GeV, weight);
+          } else if (y < 1.2) {
+            _h_dsigdptdy_y08_12->fill(pt/GeV, weight);
+          } else if (y < 1.6) {
+            _h_dsigdptdy_y12_16->fill(pt/GeV, weight);
+          } else if (y < 2.0) {
+            _h_dsigdptdy_y16_20->fill(pt/GeV, weight);
+          } else if (y < 2.4) {
+            _h_dsigdptdy_y20_24->fill(pt/GeV, weight);
+          }
+        }
+      }
+      
+    }
+    
+
+    /// Finalize: divide every histogram by the effective generated luminosity
+    /// to obtain absolute differential cross-sections.
+    void finalize() {
+      /// Scale by L_eff = sig_MC * L_exp / num_MC
+      const double lumi_mc = sumOfWeights() / crossSection();
+      const double scalefactor =  1 / lumi_mc;
+      scale(_h_dsigdptdy_y00_04, scalefactor);
+      scale(_h_dsigdptdy_y04_08, scalefactor);
+      scale(_h_dsigdptdy_y08_12, scalefactor);
+      scale(_h_dsigdptdy_y12_16, scalefactor);
+      scale(_h_dsigdptdy_y16_20, scalefactor);
+      scale(_h_dsigdptdy_y20_24, scalefactor);
+    }
+
+    //@}
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // Inclusive jet pT spectra, one per |y| bin
+    AIDA::IHistogram1D* _h_dsigdptdy_y00_04;
+    AIDA::IHistogram1D* _h_dsigdptdy_y04_08;
+    AIDA::IHistogram1D* _h_dsigdptdy_y08_12;
+    AIDA::IHistogram1D* _h_dsigdptdy_y12_16;
+    AIDA::IHistogram1D* _h_dsigdptdy_y16_20;
+    AIDA::IHistogram1D* _h_dsigdptdy_y20_24;
+    //@}
+
+  };
+    
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2008_S7662670> plugin_D0_2008_S7662670;
+  
+}

Copied: trunk/src/Analyses/D0_2008_S7719523.cc (from r1802, trunk/src/Analyses/D0/D0_2008_S7719523.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2008_S7719523.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2008_S7719523.cc)
@@ -0,0 +1,204 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of isolated gamma + jet + X differential cross-sections
+  /// Inclusive isolated gamma + jet cross-sections, differential in pT(gamma), for 
+  /// various photon and jet rapidity bins.
+  ///
+  /// @author Andy Buckley
+  /// @author Gavin Hesketh
+  class D0_2008_S7719523 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor. Declares a general FS, a leading-photon FS in |eta| < 1.0,
+    /// and a vetoed FS (everything except the leading photon) used for both the
+    /// isolation sum and jet finding.
+    D0_2008_S7719523()
+      : Analysis("D0_2008_S7719523")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      // General FS
+      FinalState fs(-5.0, 5.0);
+      addProjection(fs, "FS");
+      
+      // Get leading photon
+      LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
+      photonfs.addParticleId(PHOTON);
+      addProjection(photonfs, "LeadingPhoton");
+      
+      // FS for jets excludes the leading photon
+      VetoedFinalState vfs(fs);
+      vfs.addVetoOnThisFinalState(photonfs);
+      addProjection(vfs, "JetFS");
+    } 
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{ 
+    
+    /// Book the four pT(gamma) cross-section histograms: {central, forward} jet
+    /// rapidity x {same-sign, opposite-sign} photon/jet rapidity.
+    void init() {
+      _h_central_same_cross_section = bookHistogram1D(1, 1, 1);
+      _h_central_opp_cross_section  = bookHistogram1D(2, 1, 1);
+      _h_forward_same_cross_section = bookHistogram1D(3, 1, 1);
+      _h_forward_opp_cross_section  = bookHistogram1D(4, 1, 1); 
+    }
+    
+    
+
+    /// Do the analysis: select an isolated photon with pT > 30 GeV, find the
+    /// leading jet with pT > 15 GeV separated from the photon by dR > 0.7, and
+    /// fill the histogram matching the jet rapidity region and the relative
+    /// sign of the photon and jet rapidities.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+
+      // Get the photon
+      const FinalState& photonfs = applyProjection<FinalState>(event, "LeadingPhoton");
+      if (photonfs.particles().size() != 1) {
+        getLog() << Log::DEBUG << "No photon found" << endl;
+        vetoEvent;
+      }
+      const FourMomentum photon = photonfs.particles().front().momentum();
+      if (photon.pT()/GeV < 30) {
+        getLog() << Log::DEBUG << "Leading photon has pT < 30 GeV: " << photon.pT()/GeV << endl;
+        vetoEvent;
+      }
+      
+      // Get all charged particles
+      const FinalState& fs = applyProjection<FinalState>(event, "JetFS");
+      if (fs.empty()) {
+        vetoEvent;
+      }
+      
+      // Isolate photon by ensuring that a 0.4 cone around it contains less than 7% of the photon's energy
+      // (the photon itself is excluded from this FS, so econe is the non-photon energy)
+      const double egamma = photon.E();
+      double econe = 0.0;
+      foreach (const Particle& p, fs.particles()) {
+        if (deltaR(photon, p.momentum()) < 0.4) {
+          econe += p.momentum().E();
+          // Veto as soon as E_cone gets larger
+          if (econe/egamma > 0.07) {
+            getLog() << Log::DEBUG << "Vetoing event because photon is insufficiently isolated" << endl;
+            vetoEvent;
+          }
+        }
+      }
+      
+      
+      /// @todo Allow proj creation w/o FS as ctor arg, so that calc can be used more easily.
+      FastJets jetpro(fs, FastJets::D0ILCONE, 0.7); //< @todo This fs arg makes no sense!
+      jetpro.calc(fs.particles());
+      Jets isolated_jets;
+      foreach (const Jet& j, jetpro.jets()) {
+        const FourMomentum pjet = j.momentum();
+        const double dr = deltaR(photon.pseudorapidity(), photon.azimuthalAngle(),
+                                 pjet.pseudorapidity(), pjet.azimuthalAngle());
+        if (dr > 0.7 && pjet.pT()/GeV > 15) {
+          isolated_jets.push_back(j);
+        }
+      }
+      
+      getLog() << Log::DEBUG << "Num jets after isolation and pT cuts = " 
+               << isolated_jets.size() << endl;
+      if (isolated_jets.empty()) {
+        getLog() << Log::DEBUG << "No jets pass cuts" << endl;
+        vetoEvent;
+      }
+      
+      // Sort by pT and get leading jet
+      sort(isolated_jets.begin(), isolated_jets.end(), cmpJetsByPt);
+      const FourMomentum leadingJet = isolated_jets.front().momentum();
+      // Sign of y(jet)*y(gamma): same-side (+1) vs opposite-side (-1) topology
+      int photon_jet_sign = sign( leadingJet.rapidity() * photon.rapidity() );
+      
+      // Veto if leading jet is outside plotted rapidity regions
+      // (plotted regions: |y1| < 0.8 "central" and 1.5 < |y1| < 2.5 "forward")
+      const double abs_y1 = fabs(leadingJet.rapidity());
+      if (inRange(abs_y1, 0.8, 1.5) || abs_y1 > 2.5) {
+        getLog() << Log::DEBUG << "Leading jet falls outside acceptance range; |y1| = " 
+                 << abs_y1 << endl;
+        vetoEvent;
+      }
+      
+      // Fill histos
+      if (fabs(leadingJet.rapidity()) < 0.8) { 
+        if (photon_jet_sign >= 1) {
+          _h_central_same_cross_section->fill(photon.pT(), weight);
+        } else {
+          _h_central_opp_cross_section->fill(photon.pT(), weight);
+        }
+      } else if (inRange( fabs(leadingJet.rapidity()), 1.5, 2.5)) {
+        if (photon_jet_sign >= 1) {
+          _h_forward_same_cross_section->fill(photon.pT(), weight);
+        } else {
+          _h_forward_opp_cross_section->fill(photon.pT(), weight); 
+        }
+      }
+      
+    }
+    
+    
+    
+    /// Finalize: write the six cross-section-ratio plots directly via the
+    /// histogram factory, then scale the four base histograms to absolute
+    /// differential cross-sections using the generator luminosity and the
+    /// photon/jet rapidity bin widths.
+    void finalize() {
+      const double lumi_gen = sumOfWeights()/crossSection();
+      const double dy_photon = 2.0;
+      const double dy_jet_central = 1.6;
+      const double dy_jet_forward = 2.0;
+      
+      // Cross-section ratios (6 plots)
+      // Central/central and forward/forward ratios
+      AIDA::IHistogramFactory& hf = histogramFactory();
+      const string dir = histoDir();
+      
+      hf.divide(dir + "/d05-x01-y01", *_h_central_opp_cross_section, *_h_central_same_cross_section);
+      hf.divide(dir + "/d08-x01-y01", *_h_forward_opp_cross_section, *_h_forward_same_cross_section);
+      
+      // Central/forward ratio combinations
+      // (scaled by the ratio of jet-rapidity bin widths so numerator and
+      // denominator are per-unit-rapidity before dividing)
+      hf.divide(dir + "/d06-x01-y01", *_h_central_same_cross_section,
+                *_h_forward_same_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
+      hf.divide(dir + "/d07-x01-y01", *_h_central_opp_cross_section,
+                *_h_forward_same_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
+      hf.divide(dir + "/d09-x01-y01", *_h_central_same_cross_section,
+                *_h_forward_opp_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
+      hf.divide(dir + "/d10-x01-y01", *_h_central_opp_cross_section,
+                *_h_forward_opp_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
+      
+      // Use generator cross section for remaining histograms
+      scale(_h_central_same_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_central);
+      scale(_h_central_opp_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_central);
+      scale(_h_forward_same_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_forward);
+      scale(_h_forward_opp_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_forward);
+    }
+    
+    //@}
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // pT(gamma) spectra by jet rapidity region and photon/jet rapidity sign
+    AIDA::IHistogram1D* _h_central_same_cross_section;
+    AIDA::IHistogram1D* _h_central_opp_cross_section;
+    AIDA::IHistogram1D* _h_forward_same_cross_section;
+    AIDA::IHistogram1D* _h_forward_opp_cross_section;
+    //@}
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2008_S7719523> plugin_D0_2008_S7719523;
+  
+}

Copied: trunk/src/Analyses/D0_2008_S7837160.cc (from r1802, trunk/src/Analyses/D0/D0_2008_S7837160.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2008_S7837160.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2008_S7837160.cc)
@@ -0,0 +1,202 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/VetoedFinalState.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of W charge asymmetry from D0 Run II
+  /// @author Andy Buckley
+  /// @author Gavin Hesketh
+  class D0_2008_S7837160 : public Analysis {
+
+  public:
+
+    /// Default constructor.
+    D0_2008_S7837160()
+      : Analysis("D0_2008_S7837160")
+    {
+      // Run II W charge asymmetry
+      setBeams(PROTON, ANTIPROTON);
+      
+      // Leading electrons
+      FinalState fs(-5.0, 5.0);
+      
+      LeadingParticlesFinalState efs(fs);
+      efs.addParticleId(ELECTRON).addParticleId(POSITRON);
+      addProjection(efs, "WDecayE");
+      
+      LeadingParticlesFinalState nufs(fs);
+      nufs.addParticleId(NU_E).addParticleId(NU_EBAR);
+      addProjection(nufs, "WDecayNu");
+      
+      // Final state w/o electron
+      VetoedFinalState vfs(fs);
+      /// @todo A better way would be to have a "only photons FS". Add this projection.
+      vfs.addVetoOnThisFinalState(efs);
+      vfs.vetoNeutrinos();
+      addProjection(vfs, "NoElectronFS");
+    } 
+    
+    
+    /// @name Analysis methods
+    //@{ 
+    
+    // Book histograms
+    void init() {
+      _h_dsigplus_deta_25_35  = bookHistogram1D("dsigplus_deta_25_35", 10, 0.0, 3.2);
+      _h_dsigminus_deta_25_35 = bookHistogram1D("dsigminus_deta_25_35", 10, 0.0, 3.2);
+      _h_dsigplus_deta_35     = bookHistogram1D("dsigplus_deta_35", 10, 0.0, 3.2);
+      _h_dsigminus_deta_35    = bookHistogram1D("dsigminus_deta_35", 10, 0.0, 3.2);
+      _h_dsigplus_deta_25     = bookHistogram1D("dsigplus_deta_25", 10, 0.0, 3.2);
+      _h_dsigminus_deta_25    = bookHistogram1D("dsigminus_deta_25", 10, 0.0, 3.2);
+    }
+    
+    
+    /// Do the analysis 
+    void analyze(const Event & event) {
+      const double weight = event.weight();
+      
+      // Find the W decay products
+      const FinalState& efs = applyProjection<FinalState>(event, "WDecayE");
+      const FinalState& nufs = applyProjection<FinalState>(event, "WDecayNu");
+      
+      // If there is no e/nu_e pair in the FinalState, skip the event
+      if (efs.particles().size() < 1 || nufs.particles().size() < 1) {
+        getLog() << Log::DEBUG << "No e/nu_e pair found " << endl;
+        vetoEvent;
+      }
+      
+      // Identify leading nu and electron
+      ParticleVector es = efs.particles();
+      sort(es.begin(), es.end(), cmpParticleByEt);
+      Particle leading_e = es[0];
+      //
+      ParticleVector nus = nufs.particles();
+      sort(nus.begin(), nus.end(), cmpParticleByEt);
+      Particle leading_nu = nus[0];
+      
+      // Require that the neutrino has Et > 25 GeV
+      const FourMomentum nu = leading_nu.momentum();
+      if (nu.Et() < 25*GeV) {
+        getLog() << Log::DEBUG << "Neutrino fails Et cut" << endl;
+        vetoEvent;
+      }
+      
+      // Get "raw" electron 4-momentum and add back in photons that could have radiated from the electron
+      FourMomentum e = leading_e.momentum();
+      /// @todo Use ClusteredPhotons photon summing projection
+      const ParticleVector allparts = applyProjection<FinalState>(event, "NoElectronFS").particles();
+      const double HALO_RADIUS = 0.2;
+      foreach (const Particle& p, allparts) {
+        if (p.pdgId() == PHOTON) {
+          const double pho_eta = p.momentum().pseudorapidity();
+          const double pho_phi = p.momentum().azimuthalAngle();
+          if (deltaR(e.pseudorapidity(), e.azimuthalAngle(), pho_eta, pho_phi) < HALO_RADIUS) {
+            e += p.momentum();
+          }
+        }
+      }
+      
+      // Require that the electron has Et > 25 GeV
+      if (e.Et() < 25*GeV) {
+        getLog() << Log::DEBUG << "Electron fails Et cut" << endl;
+        vetoEvent;
+      }
+      
+      
+      const double eta_e = fabs(e.pseudorapidity());
+      const double et_e = e.Et();
+      const int chg_e = PID::threeCharge(leading_e.pdgId());
+      if (et_e < 35*GeV) {
+        // 25 < ET < 35
+        if (chg_e < 0) {
+          _h_dsigminus_deta_25_35->fill(eta_e, weight);
+        } else {
+          _h_dsigplus_deta_25_35->fill(eta_e, weight);
+        }
+      } else {
+        // ET > 35
+        if (chg_e < 0) {
+          _h_dsigminus_deta_35->fill(eta_e, weight);
+        } else {
+          _h_dsigplus_deta_35->fill(eta_e, weight);
+        }
+      }
+      // Inclusive: ET > 25
+      if (chg_e < 0) {
+        _h_dsigminus_deta_25->fill(eta_e, weight);
+      } else {
+        _h_dsigplus_deta_25->fill(eta_e, weight);
+      }
+    }
+    
+    
+    /// Finalize
+    void finalize() {
+      // Construct asymmetry: (dsig+/deta - dsig-/deta) / (dsig+/deta + dsig-/deta) for each Et region
+      AIDA::IHistogramFactory& hf = histogramFactory();
+      
+      const string basetitle = "W charge asymmetry for ";
+      const string xlabel = "$|\\eta|$ of leading electron";
+      const string ylabel = "A = "
+        "$(\\frac{\\mathrm{d}{\\sigma^+}}{\\mathrm{d}{|\\eta|}} - \\frac{\\mathrm{d}{\\sigma^-}}{\\mathrm{d}{|\\eta|}}) / "
+        "(\\frac{\\mathrm{d}{\\sigma^+}}{\\mathrm{d}{|\\eta|}} + \\frac{\\mathrm{d}{\\sigma^-}}{\\mathrm{d}{|\\eta|}})$";
+      
+      IHistogram1D* num25_35 = hf.subtract("/num25_35", *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35);
+      IHistogram1D* denom25_35 = hf.add("/denom25_35", *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35);
+      assert(num25_35 && denom25_35);
+      IDataPointSet* tot25_35 = hf.divide(histoDir() + "/d01-x01-y01", *num25_35, *denom25_35);
+      tot25_35->setTitle(basetitle + "$25 < E_\\perp < 35$ GeV");
+      tot25_35->setXTitle(xlabel);
+      tot25_35->setYTitle(ylabel);
+      hf.destroy(num25_35);
+      hf.destroy(denom25_35);
+      //
+      IHistogram1D* num35 = hf.subtract("/num35", *_h_dsigplus_deta_35, *_h_dsigminus_deta_35);
+      IHistogram1D* denom35 = hf.add("/denom35", *_h_dsigplus_deta_35, *_h_dsigminus_deta_35);
+      assert(num35 && denom35);
+      IDataPointSet* tot35 = hf.divide(histoDir() + "/d02-x01-y01", *num35, *denom35);
+      tot35->setTitle(basetitle + "$E_\\perp > 35$ GeV");
+      tot35->setXTitle(xlabel);
+      tot35->setYTitle(ylabel);
+      hf.destroy(num35);
+      hf.destroy(denom35);
+      //
+      IHistogram1D* num25 = hf.subtract("/num25", *_h_dsigplus_deta_25, *_h_dsigminus_deta_25);
+      IHistogram1D* denom25 = hf.add("/denom25", *_h_dsigplus_deta_25, *_h_dsigminus_deta_25);
+      assert(num25 && denom25);
+      IDataPointSet* tot25 = hf.divide(histoDir() + "/d03-x01-y01", *num25, *denom25);
+      tot25->setTitle(basetitle + "$E_\\perp > 25$ GeV");
+      tot25->setXTitle(xlabel);
+      tot25->setYTitle(ylabel);
+      hf.destroy(num25);
+      hf.destroy(denom25);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    /// @todo Move into histo manager
+    AIDA::IHistogram1D *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35;
+    AIDA::IHistogram1D *_h_dsigplus_deta_35, *_h_dsigminus_deta_35;
+    AIDA::IHistogram1D *_h_dsigplus_deta_25, *_h_dsigminus_deta_25;
+    //@}
+
+  };
+    
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2008_S7837160> plugin_D0_2008_S7837160;
+  
+}

Copied: trunk/src/Analyses/D0_2008_S7863608.cc (from r1802, trunk/src/Analyses/D0/D0_2008_S7863608.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2008_S7863608.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2008_S7863608.cc)
@@ -0,0 +1,129 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ZFinder.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measurement of differential Z/gamma* + jet + X cross sections
+  /// @author Gavin Hesketh, Andy Buckley, Frank Siegert
+  class D0_2008_S7863608 : public Analysis {
+
+  public:
+
+    /// @name Construction
+    //@{
+    /// Constructor
+    D0_2008_S7863608()
+      : Analysis("D0_2008_S7863608")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      ZFinder zfinder(-1.7, 1.7, 15.0*GeV, MUON, 65.0*GeV, 115.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+      
+      FastJets conefinder(zfinder.remainingFinalState(), FastJets::D0ILCONE, 0.5, 20.0*GeV);
+      addProjection(conefinder, "ConeFinder");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{     
+    
+    /// Book histograms
+    void init() {
+      _h_jet_pT_cross_section = bookHistogram1D(1, 1, 1);
+      _h_jet_y_cross_section = bookHistogram1D(2, 1, 1);
+      _h_Z_pT_cross_section = bookHistogram1D(3, 1, 1);
+      _h_Z_y_cross_section = bookHistogram1D(4, 1, 1);
+      _h_total_cross_section = bookHistogram1D(5, 1, 1);  
+    }
+    
+    
+
+    // Do the analysis 
+    void analyze(const Event& e) {
+      const double weight = e.weight();
+      
+      const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
+      if (zfinder.particles().size()==1) {
+        const JetAlg& jetpro = applyProjection<JetAlg>(e, "ConeFinder");
+        const Jets& jets = jetpro.jetsByPt(20.0*GeV);
+        Jets jets_cut;
+        foreach (const Jet& j, jets) {
+          if (fabs(j.momentum().pseudorapidity()) < 2.8) {
+            jets_cut.push_back(j);
+          }
+        }
+        
+        // Return if there are no jets:
+        if(jets_cut.size()<1) {
+          getLog() << Log::DEBUG << "Skipping event " << e.genEvent().event_number()
+                   << " because no jets pass cuts " << endl;
+          vetoEvent;
+        }
+        
+        // cut on Delta R between jet and muons
+        foreach (const Jet& j, jets_cut) {
+          foreach (const Particle& mu, zfinder.constituentsFinalState().particles()) {
+            if (deltaR(mu.momentum().pseudorapidity(), mu.momentum().azimuthalAngle(),
+                       j.momentum().pseudorapidity(), j.momentum().azimuthalAngle()) < 0.5) {
+              vetoEvent;
+            }
+          }
+        }
+        
+        const FourMomentum Zmom = zfinder.particles()[0].momentum();
+        
+        // In jet pT
+        _h_jet_pT_cross_section->fill( jets_cut[0].momentum().pT(), weight);
+        _h_jet_y_cross_section->fill( fabs(jets_cut[0].momentum().rapidity()), weight);
+        
+        // In Z pT
+        _h_Z_pT_cross_section->fill(Zmom.pT(), weight);
+        _h_Z_y_cross_section->fill(fabs(Zmom.rapidity()), weight);
+        
+        _h_total_cross_section->fill(1960.0, weight);
+      }
+    }
+    
+    
+    
+    /// Finalize
+    void finalize() {
+      const double invlumi = crossSection()/sumOfWeights();
+      scale(_h_total_cross_section, invlumi);
+      scale(_h_jet_pT_cross_section, invlumi);
+      scale(_h_jet_y_cross_section, invlumi);
+      scale(_h_Z_pT_cross_section, invlumi);
+      scale(_h_Z_y_cross_section, invlumi);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_jet_pT_cross_section;
+    AIDA::IHistogram1D * _h_jet_y_cross_section;
+    AIDA::IHistogram1D * _h_Z_pT_cross_section;
+    AIDA::IHistogram1D * _h_Z_y_cross_section;
+    AIDA::IHistogram1D * _h_total_cross_section;
+    //@}
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2008_S7863608> plugin_D0_2008_S7863608;
+  
+}

Copied: trunk/src/Analyses/D0_2009_S8202443.cc (from r1802, trunk/src/Analyses/D0/D0_2009_S8202443.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2009_S8202443.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2009_S8202443.cc)
@@ -0,0 +1,159 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ZFinder.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  class D0_2009_S8202443 : public Analysis {
+
+  public:
+
+    /// @name Construction
+    //@{
+    /// Constructor
+    D0_2009_S8202443()
+      : Analysis("D0_2009_S8202443"),
+        _sum_of_weights(0.0), _sum_of_weights_constrained(0.0)
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      // Leptons in constrained tracking acceptance
+      vector<pair<double, double> > etaRanges;
+      etaRanges.push_back(make_pair(-2.5, -1.5));
+      etaRanges.push_back(make_pair(-1.1, 1.1));
+      etaRanges.push_back(make_pair(1.5, 2.5));
+      ZFinder zfinder_constrained(etaRanges, 25.0*GeV, ELECTRON,
+                                  65.0*GeV, 115.0*GeV, 0.2);
+      addProjection(zfinder_constrained, "ZFinderConstrained");
+      FastJets conefinder_constrained(zfinder_constrained.remainingFinalState(),
+                                      FastJets::D0ILCONE, 0.5, 20.0*GeV);
+      addProjection(conefinder_constrained, "ConeFinderConstrained");
+      
+      // Unconstrained leptons
+      ZFinder zfinder(FinalState(), ELECTRON, 65.0*GeV, 115.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+      FastJets conefinder(zfinder.remainingFinalState(), FastJets::D0ILCONE, 0.5, 20.0*GeV);
+      addProjection(conefinder, "ConeFinder");
+    } 
+
+    //@}
+
+
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book histograms
+    void init() {
+      _h_jet1_pT_constrained = bookHistogram1D(1, 1, 1);
+      _h_jet2_pT_constrained = bookHistogram1D(3, 1, 1);
+      _h_jet3_pT_constrained = bookHistogram1D(5, 1, 1);
+      _h_jet1_pT = bookHistogram1D(2, 1, 1);
+      _h_jet2_pT = bookHistogram1D(4, 1, 1);
+      _h_jet3_pT = bookHistogram1D(6, 1, 1);
+    }
+    
+    
+    
+    // Do the analysis 
+    void analyze(const Event& e) {
+      double weight = e.weight();
+      
+      // unconstrained electrons first
+      const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
+      if (zfinder.particles().size()==1) {
+        _sum_of_weights += weight;
+        const JetAlg& jetpro = applyProjection<JetAlg>(e, "ConeFinder");
+        const Jets& jets = jetpro.jetsByPt(20.0*GeV);
+        Jets jets_cut;
+        foreach (const Jet& j, jets) {
+          if (fabs(j.momentum().pseudorapidity()) < 2.5) {
+            jets_cut.push_back(j);
+          }
+        }
+        
+        if (jets_cut.size()>0) {
+          _h_jet1_pT->fill(jets_cut[0].momentum().pT()/GeV, weight);
+        }
+        if (jets_cut.size()>1) {
+          _h_jet2_pT->fill(jets_cut[1].momentum().pT()/GeV, weight);
+        }
+        if (jets_cut.size()>2) {
+          _h_jet3_pT->fill(jets_cut[2].momentum().pT()/GeV, weight);
+        }
+      }
+      else {
+        getLog() << Log::DEBUG << "no unique lepton pair found." << endl;
+      }
+      
+      
+      // constrained electrons
+      const ZFinder& zfinder_constrained = applyProjection<ZFinder>(e, "ZFinderConstrained");
+      if (zfinder_constrained.particles().size()==1) {
+        _sum_of_weights_constrained += weight;
+        const JetAlg& jetpro = applyProjection<JetAlg>(e, "ConeFinderConstrained");
+        const Jets& jets = jetpro.jetsByPt(20.0*GeV);
+        Jets jets_cut;
+        foreach (const Jet& j, jets) {
+          if (fabs(j.momentum().pseudorapidity()) < 2.5) {
+            jets_cut.push_back(j);
+          }
+        }
+        
+        if (jets_cut.size()>0) {
+          _h_jet1_pT_constrained->fill(jets_cut[0].momentum().pT()/GeV, weight);
+        }
+        if (jets_cut.size()>1) {
+          _h_jet2_pT_constrained->fill(jets_cut[1].momentum().pT()/GeV, weight);
+        }
+        if (jets_cut.size()>2) {
+          _h_jet3_pT_constrained->fill(jets_cut[2].momentum().pT()/GeV, weight);
+        }
+      }
+      else {
+        getLog() << Log::DEBUG << "no unique lepton pair found." << endl;
+        vetoEvent;
+      }
+    }
+    
+    
+    
+    // Finalize
+    void finalize() {
+      scale(_h_jet1_pT, 1.0/_sum_of_weights);
+      scale(_h_jet2_pT, 1.0/_sum_of_weights);
+      scale(_h_jet3_pT, 1.0/_sum_of_weights);
+      scale(_h_jet1_pT_constrained, 1.0/_sum_of_weights_constrained);
+      scale(_h_jet2_pT_constrained, 1.0/_sum_of_weights_constrained);
+      scale(_h_jet3_pT_constrained, 1.0/_sum_of_weights_constrained);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_jet1_pT;
+    AIDA::IHistogram1D * _h_jet2_pT;
+    AIDA::IHistogram1D * _h_jet3_pT;
+    AIDA::IHistogram1D * _h_jet1_pT_constrained;
+    AIDA::IHistogram1D * _h_jet2_pT_constrained;
+    AIDA::IHistogram1D * _h_jet3_pT_constrained;
+    //@}
+    
+    double _sum_of_weights, _sum_of_weights_constrained;
+
+  };
+
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2009_S8202443> plugin_D0_2009_S8202443;
+  
+}

Copied: trunk/src/Analyses/D0_2009_S8320160.cc (from r1802, trunk/src/Analyses/D0/D0_2009_S8320160.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2009_S8320160.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2009_S8320160.cc)
@@ -0,0 +1,98 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Tools/BinnedHistogram.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  class D0_2009_S8320160 : public Analysis {
+
+  public:
+
+    /// @name Construction
+    //@{
+
+    /// Constructor
+    D0_2009_S8320160()
+      : Analysis("D0_2009_S8320160")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      FinalState fs;
+      FastJets conefinder(fs, FastJets::D0ILCONE, 0.7);
+      addProjection(conefinder, "ConeFinder");
+    } 
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{ 
+    
+    // Book histograms
+    void init() {
+      _h_chi_dijet.addHistogram(250., 300., bookHistogram1D(1, 1, 1));
+      _h_chi_dijet.addHistogram(300., 400., bookHistogram1D(2, 1, 1));
+      _h_chi_dijet.addHistogram(400., 500., bookHistogram1D(3, 1, 1));
+      _h_chi_dijet.addHistogram(500., 600., bookHistogram1D(4, 1, 1));
+      _h_chi_dijet.addHistogram(600., 700., bookHistogram1D(5, 1, 1));
+      _h_chi_dijet.addHistogram(700., 800., bookHistogram1D(6, 1, 1));
+      _h_chi_dijet.addHistogram(800., 900., bookHistogram1D(7, 1, 1));
+      _h_chi_dijet.addHistogram(900., 1000., bookHistogram1D(8, 1, 1));
+      _h_chi_dijet.addHistogram(1000., 1100., bookHistogram1D(9, 1, 1));
+      _h_chi_dijet.addHistogram(1100., 1960, bookHistogram1D(10, 1, 1));
+    }
+    
+    
+    
+    /// Do the analysis 
+    void analyze(const Event & e) {
+      const double weight = e.weight();
+      
+      const Jets& jets = applyProjection<JetAlg>(e, "ConeFinder").jetsByPt();      
+      if (jets.size() < 2) vetoEvent;
+    
+      FourMomentum j0(jets[0].momentum());
+      FourMomentum j1(jets[1].momentum());
+      double y0 = j0.rapidity();
+      double y1 = j1.rapidity();
+      
+      if (fabs(y0+y1)>2) vetoEvent;
+      
+      double mjj = FourMomentum(j0+j1).mass();
+      double chi = exp(fabs(y0-y1));
+      _h_chi_dijet.fill(mjj, chi, weight);
+    }
+    
+    
+    
+    /// Finalize
+    void finalize() {
+      foreach (AIDA::IHistogram1D* hist, _h_chi_dijet.getHistograms()) {
+        normalize(hist);
+      }
+    }
+
+    //@}
+    
+    
+  private:
+    
+    /// @name Histograms
+    //@{
+    BinnedHistogram<double> _h_chi_dijet;
+    //@}
+    
+  };
+
+
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2009_S8320160> plugin_D0_2009_S8320160;
+  
+}

Copied: trunk/src/Analyses/D0_2009_S8349509.cc (from r1802, trunk/src/Analyses/D0/D0_2009_S8349509.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/D0_2009_S8349509.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/D0/D0_2009_S8349509.cc)
@@ -0,0 +1,147 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ZFinder.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  class D0_2009_S8349509 : public Analysis {
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor
+    D0_2009_S8349509() : 
+      Analysis("D0_2009_S8349509") 
+    {
+      setBeams(PROTON, ANTIPROTON);
+      
+      ZFinder zfinder(-1.7, 1.7, 15.0*GeV, MUON, 65.0*GeV, 115.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+      
+      FastJets conefinder(zfinder.remainingFinalState(), FastJets::D0ILCONE, 0.5, 20.0*GeV);
+      addProjection(conefinder, "ConeFinder");
+    }
+    
+    //@}
+
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      
+      _h_dphi_jet_Z25 = bookHistogram1D(1, 1, 1);
+      _h_dphi_jet_Z45 = bookHistogram1D(2, 1, 1);
+      
+      _h_dy_jet_Z25 = bookHistogram1D(3, 1, 1);
+      _h_dy_jet_Z45 = bookHistogram1D(4, 1, 1);
+      
+      _h_yboost_jet_Z25 = bookHistogram1D(5, 1, 1);
+      _h_yboost_jet_Z45 = bookHistogram1D(6, 1, 1);
+      
+      _inclusive_Z_sumofweights = 0.0;
+    }
+    
+    
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      
+      const ZFinder& zfinder = applyProjection<ZFinder>(event, "ZFinder");
+      if (zfinder.particles().size()==1) {
+        // count inclusive sum of weights for histogram normalisation
+        _inclusive_Z_sumofweights += weight;
+        
+        Jets jets;
+        foreach (const Jet& j, applyProjection<JetAlg>(event, "ConeFinder").jetsByPt()) {
+          if (fabs(j.momentum().pseudorapidity()) < 2.8) {
+            jets.push_back(j);
+            break;
+          }
+        }
+        
+        // Return if there are no jets:
+        if (jets.size() < 1) {
+          getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                   << " because no jets pass cuts " << endl;
+          vetoEvent;
+        }
+        
+        // Cut on Delta R between jet and muons
+        foreach (const Jet& j, jets) {
+          foreach (const Particle& mu, zfinder.constituentsFinalState().particles()) {
+            if (deltaR(mu.momentum(), j.momentum()) < 0.5) {
+              vetoEvent;
+            }
+          }
+        }
+        
+        const FourMomentum Zmom = zfinder.particles()[0].momentum();
+        const FourMomentum jetmom = jets[0].momentum();
+        double yZ = Zmom.rapidity();
+        double yjet = jetmom.rapidity();
+        double dphi = deltaPhi(Zmom.phi(), jetmom.phi());
+        double dy = fabs(yZ-yjet);
+        double yboost = fabs(yZ+yjet)/2.0;
+        
+        if (Zmom.pT() > 25.0*GeV) {
+          _h_dphi_jet_Z25->fill(dphi,weight);
+          _h_dy_jet_Z25->fill(dy, weight);
+          _h_yboost_jet_Z25->fill(yboost, weight);
+        }
+        if (Zmom.pT() > 45.0*GeV) {
+          _h_dphi_jet_Z45->fill(dphi,weight);
+          _h_dy_jet_Z45->fill(dy, weight);
+          _h_yboost_jet_Z45->fill(yboost, weight);
+        }
+      }
+      
+    }
+    
+    
+    void finalize() {
+      if (_inclusive_Z_sumofweights == 0.0) return;
+      scale(_h_dphi_jet_Z25, 1.0/_inclusive_Z_sumofweights);
+      scale(_h_dphi_jet_Z45, 1.0/_inclusive_Z_sumofweights);
+      scale(_h_dy_jet_Z25, 1.0/_inclusive_Z_sumofweights);
+      scale(_h_dy_jet_Z45, 1.0/_inclusive_Z_sumofweights);
+      scale(_h_yboost_jet_Z25, 1.0/_inclusive_Z_sumofweights);
+      scale(_h_yboost_jet_Z45, 1.0/_inclusive_Z_sumofweights);
+    }
+    
+    //@}
+
+  private:
+
+    // Data members like post-cuts event weight counters go here
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_h_dphi_jet_Z25;
+    AIDA::IHistogram1D *_h_dphi_jet_Z45;
+
+    AIDA::IHistogram1D *_h_dy_jet_Z25;
+    AIDA::IHistogram1D *_h_dy_jet_Z45;
+
+    AIDA::IHistogram1D *_h_yboost_jet_Z25;
+    AIDA::IHistogram1D *_h_yboost_jet_Z45;
+    //@}
+    
+    double _inclusive_Z_sumofweights;
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<D0_2009_S8349509> plugin_D0_2009_S8349509;
+  
+}

Copied: trunk/src/Analyses/DELPHI_1995_S3137023.cc (from r1802, trunk/src/Analyses/LEP/DELPHI_1995_S3137023.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/DELPHI_1995_S3137023.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/DELPHI_1995_S3137023.cc)
@@ -0,0 +1,109 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/UnstableFinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief DELPHI strange baryon paper
+  /// @author Hendrik Hoeth
+  class DELPHI_1995_S3137023 : public Analysis {
+  public:
+
+    /// Constructor
+    DELPHI_1995_S3137023() 
+      : Analysis("DELPHI_1995_S3137023")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      addProjection(ChargedFinalState(), "FS");
+      addProjection(UnstableFinalState(), "UFS");
+      _weightedTotalNumXiMinus = 0;
+      _weightedTotalNumSigma1385Plus = 0;
+    }
+
+    
+    /// @name Analysis methods
+    //@{
+
+    void init() {
+      _histXpXiMinus       = bookHistogram1D(2, 1, 1);
+      _histXpSigma1385Plus = bookHistogram1D(3, 1, 1);
+    }
+
+
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      // Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+      
+      foreach (const Particle& p, ufs.particles()) {
+        const int id = abs(p.pdgId());
+        switch (id) {
+        case 3312:
+          _histXpXiMinus->fill(p.momentum().vector3().mod()/meanBeamMom, weight);
+          _weightedTotalNumXiMinus += weight;
+          break;
+        case 3114:
+          _histXpSigma1385Plus->fill(p.momentum().vector3().mod()/meanBeamMom, weight);
+          _weightedTotalNumSigma1385Plus += weight;
+          break;
+        }
+      }
+      
+    }
+        
+   
+    /// Finalize
+    void finalize() { 
+      normalize(_histXpXiMinus       , _weightedTotalNumXiMinus/sumOfWeights());
+      normalize(_histXpSigma1385Plus , _weightedTotalNumSigma1385Plus/sumOfWeights());
+    }
+    
+    //@}
+
+
+  private:
+    
+    /// Store the weighted sums of numbers of charged / charged+neutral
+    /// particles - used to calculate average number of particles for the 
+    /// inclusive single particle distributions' normalisations.
+    double _weightedTotalNumXiMinus;
+    double _weightedTotalNumSigma1385Plus;
+    
+    AIDA::IHistogram1D *_histXpXiMinus;
+    AIDA::IHistogram1D *_histXpSigma1385Plus;
+    //@}
+    
+  };
+  
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<DELPHI_1995_S3137023> plugin_DELPHI_1995_S3137023;
+  
+}

Copied: trunk/src/Analyses/DELPHI_1996_S3430090.cc (from r1802, trunk/src/Analyses/LEP/DELPHI_1996_S3430090.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/DELPHI_1996_S3430090.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/DELPHI_1996_S3430090.cc)
@@ -0,0 +1,534 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/Sphericity.hh"
+#include "Rivet/Projections/Thrust.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/ParisiTensor.hh"
+#include "Rivet/Projections/Hemispheres.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/UnstableFinalState.hh"
+
+namespace Rivet {
+
+
+  /**
+   * @brief DELPHI event shapes and identified particle spectra
+   * @author Andy Buckley
+   * @author Hendrik Hoeth
+   *
+   * This is the paper which was used for the original PROFESSOR MC tuning
+   * study. It studies a wide range of e+ e- event shape variables, differential
+   * jet rates in the Durham and JADE schemes, and incorporates identified
+   * particle spectra, from other LEP analyses.
+   *
+   *
+   * @par Run conditions
+   *
+   * @arg LEP1 beam energy: \f$ \sqrt{s} = 91.2 \f$ GeV
+   * @arg Run with generic QCD events.
+   * @arg No \f$ p_\perp^\text{min} \f$ cutoff is required
+   */
+  class DELPHI_1996_S3430090 : public Analysis {
+  public:
+
+    /// Constructor: declare all projections (jets, event shapes, hemispheres)
+    /// on the charged final state and zero the weight accumulators.
+    DELPHI_1996_S3430090() 
+      : Analysis("DELPHI_1996_S3430090")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+      addProjection(UnstableFinalState(), "UFS");
+      addProjection(FastJets(cfs, FastJets::JADE, 0.7), "JadeJets");
+      addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
+      addProjection(Sphericity(cfs), "Sphericity");
+      addProjection(ParisiTensor(cfs), "Parisi");
+      const Thrust thrust(cfs);
+      addProjection(thrust, "Thrust");
+      addProjection(Hemispheres(thrust), "Hemispheres");
+      _weightedTotalPartNum = 0;
+      _passedCutWeightSum = 0;
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+      const double weight = e.weight();
+      _passedCutWeightSum += weight;
+      _weightedTotalPartNum += numParticles * weight;
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      // Thrusts
+      getLog() << Log::DEBUG << "Calculating thrust" << endl;
+      const Thrust& thrust = applyProjection<Thrust>(e, "Thrust");
+      _hist1MinusT->fill(1 - thrust.thrust(), weight); 
+      _histTMajor->fill(thrust.thrustMajor(), weight); 
+      _histTMinor->fill(thrust.thrustMinor(), weight); 
+      _histOblateness->fill(thrust.oblateness(), weight);
+      
+      // Jets: differential y_merge rates, guarded against a missing cluster
+      // sequence (e.g. too few input particles to cluster).
+      const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
+      if (durjet.clusterSeq()) {
+        _histDiffRate2Durham->fill(durjet.clusterSeq()->exclusive_ymerge(2), weight); 
+        _histDiffRate3Durham->fill(durjet.clusterSeq()->exclusive_ymerge(3), weight); 
+        _histDiffRate4Durham->fill(durjet.clusterSeq()->exclusive_ymerge(4), weight); 
+      }
+      const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
+      if (jadejet.clusterSeq()) {
+        _histDiffRate2Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(2), weight); 
+        _histDiffRate3Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(3), weight); 
+        _histDiffRate4Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(4), weight); 
+      }
+      
+      // Sphericities
+      getLog() << Log::DEBUG << "Calculating sphericity" << endl;
+      const Sphericity& sphericity = applyProjection<Sphericity>(e, "Sphericity");
+      _histSphericity->fill(sphericity.sphericity(), weight); 
+      _histAplanarity->fill(sphericity.aplanarity(), weight); 
+      _histPlanarity->fill(sphericity.planarity(), weight); 
+      
+      // C & D params
+      getLog() << Log::DEBUG << "Calculating Parisi params" << endl;
+      const ParisiTensor& parisi = applyProjection<ParisiTensor>(e, "Parisi");
+      _histCParam->fill(parisi.C(), weight);
+      _histDParam->fill(parisi.D(), weight);
+      
+      // Hemispheres
+      getLog() << Log::DEBUG << "Calculating hemisphere variables" << endl;
+      const Hemispheres& hemi = applyProjection<Hemispheres>(e, "Hemispheres");
+      _histHemiMassH->fill(hemi.getScaledM2high(), weight); 
+      _histHemiMassL->fill(hemi.getScaledM2low(), weight); 
+      _histHemiMassD->fill(hemi.getScaledM2diff(), weight); 
+      _histHemiBroadW->fill(hemi.getBmax(), weight); 
+      _histHemiBroadN->fill(hemi.getBmin(), weight); 
+      _histHemiBroadT->fill(hemi.getBsum(), weight); 
+      _histHemiBroadD->fill(hemi.getBdiff(), weight); 
+      
+      // Iterate over all the charged final state particles.
+      double Evis = 0.0;
+      double Evis2 = 0.0;
+      getLog() << Log::DEBUG << "About to iterate over charged FS particles" << endl;
+      for (ParticleVector::const_iterator p = fs.particles().begin(); p != fs.particles().end(); ++p) {
+        // Get momentum and energy of each particle.
+        const Vector3 mom3 = p->momentum().vector3();
+        const double energy = p->momentum().E();
+        Evis += energy;
+        
+        // Scaled momenta.
+        const double mom = mom3.mod();
+        const double scaledMom = mom/meanBeamMom;
+        const double logInvScaledMom = -std::log(scaledMom);
+        _histLogScaledMom->fill(logInvScaledMom, weight); 
+        _histScaledMom->fill(scaledMom, weight); 
+        
+        // Get momenta components w.r.t. thrust and sphericity.
+        const double momT = dot(thrust.thrustAxis(), mom3);
+        const double momS = dot(sphericity.sphericityAxis(), mom3);
+        const double pTinT = dot(mom3, thrust.thrustMajorAxis());
+        const double pToutT = dot(mom3, thrust.thrustMinorAxis());
+        const double pTinS = dot(mom3, sphericity.sphericityMajorAxis());
+        const double pToutS = dot(mom3, sphericity.sphericityMinorAxis());
+        const double pT = sqrt(pow(pTinT, 2) + pow(pToutT, 2));
+        _histPtTIn->fill(fabs(pTinT/GeV), weight);
+        _histPtTOut->fill(fabs(pToutT/GeV), weight);
+        _histPtSIn->fill(fabs(pTinS/GeV), weight);
+        _histPtSOut->fill(fabs(pToutS/GeV), weight);
+        _histPtVsXp->fill(scaledMom, fabs(pT/GeV), weight);
+        _histPtTOutVsXp->fill(scaledMom, fabs(pToutT/GeV), weight);
+        
+        // Calculate rapidities w.r.t. thrust and sphericity.
+        const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
+        const double rapidityS = 0.5 * std::log((energy + momS) / (energy - momS));
+        _histRapidityT->fill(rapidityT, weight); 
+        _histRapidityS->fill(rapidityS, weight); 
+      }
+      Evis2 = Evis*Evis;
+      
+      // Energy-energy correlation (EEC) and its asymmetry (AEEC): each distinct
+      // particle pair contributes E_i*E_j/Evis^2 at cos(chi_ij); the AEEC also
+      // receives a mirrored negative entry so the asymmetry accumulates in one histo.
+      for (ParticleVector::const_iterator p_i = fs.particles().begin(); p_i != fs.particles().end(); ++p_i) {
+        for (ParticleVector::const_iterator p_j = p_i; p_j != fs.particles().end(); ++p_j) {
+          if (p_i == p_j) continue;
+          const Vector3 mom3_i = p_i->momentum().vector3();
+          const Vector3 mom3_j = p_j->momentum().vector3();
+          const double energy_i = p_i->momentum().E();
+          const double energy_j = p_j->momentum().E();
+          const double cosij = dot(mom3_i.unit(), mom3_j.unit());
+          const double eec = (energy_i*energy_j) / Evis2;
+          _histEEC->fill(cosij, eec*weight);
+          _histAEEC->fill( cosij,  eec*weight);
+          _histAEEC->fill(-cosij, -eec*weight);
+        }
+      }
+      
+      // Single-bin "counter" histogram: filled at the bin centre, weighted by
+      // the event's charged multiplicity (giving a weighted mean after finalize).
+      _histMultiCharged->fill(_histMultiCharged->binMean(0), numParticles*weight);
+      
+      
+      // Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+      
+      // Mean multiplicities per species, keyed on |PDG id| (so particle and
+      // antiparticle are counted together). All histos are single-bin counters.
+      foreach (const Particle& p, ufs.particles()) {
+        int id = abs(p.pdgId());
+        switch (id) {
+        case 211:
+          _histMultiPiPlus->fill(_histMultiPiPlus->binMean(0), weight);
+          break;
+        case 111:
+          _histMultiPi0->fill(_histMultiPi0->binMean(0), weight);
+          break;
+        case 321:
+          _histMultiKPlus->fill(_histMultiKPlus->binMean(0), weight);
+          break;
+        case 130:
+        case 310:
+          _histMultiK0->fill(_histMultiK0->binMean(0), weight);
+          break;
+        case 221:
+          _histMultiEta->fill(_histMultiEta->binMean(0), weight);
+          break;
+        case 331:
+          _histMultiEtaPrime->fill(_histMultiEtaPrime->binMean(0), weight);
+          break;
+        case 411:
+          _histMultiDPlus->fill(_histMultiDPlus->binMean(0), weight);
+          break;
+        case 421:
+          _histMultiD0->fill(_histMultiD0->binMean(0), weight);
+          break;
+        case 511:
+        case 521:
+        case 531:
+          _histMultiBPlus0->fill(_histMultiBPlus0->binMean(0), weight);
+          break;
+        case 9010221:
+          _histMultiF0->fill(_histMultiF0->binMean(0), weight);
+          break;
+        case 113:
+          _histMultiRho->fill(_histMultiRho->binMean(0), weight);
+          break;
+        case 323:
+          _histMultiKStar892Plus->fill(_histMultiKStar892Plus->binMean(0), weight);
+          break;
+        case 313:
+          _histMultiKStar892_0->fill(_histMultiKStar892_0->binMean(0), weight);
+          break;
+        case 333:
+          _histMultiPhi->fill(_histMultiPhi->binMean(0), weight);
+          break;
+        case 413:
+          _histMultiDStar2010Plus->fill(_histMultiDStar2010Plus->binMean(0), weight);
+          break;
+        case 225:
+          _histMultiF2->fill(_histMultiF2->binMean(0), weight);
+          break;
+        case 315:
+          _histMultiK2Star1430_0->fill(_histMultiK2Star1430_0->binMean(0), weight);
+          break;
+        case 2212:
+          _histMultiP->fill(_histMultiP->binMean(0), weight);
+          break;
+        case 3122:
+          _histMultiLambda0->fill(_histMultiLambda0->binMean(0), weight);
+          break;
+        case 3312:
+          _histMultiXiMinus->fill(_histMultiXiMinus->binMean(0), weight);
+          break;
+        case 3334:
+          _histMultiOmegaMinus->fill(_histMultiOmegaMinus->binMean(0), weight);
+          break;
+        case 2224:
+          _histMultiDeltaPlusPlus->fill(_histMultiDeltaPlusPlus->binMean(0), weight);
+          break;
+        // NOTE(review): 3114 is the PDG code for Sigma(1385)^- (3224 is the ^+);
+        // confirm which charge states this "Plus"-named histo is meant to count.
+        case 3114:
+          _histMultiSigma1385Plus->fill(_histMultiSigma1385Plus->binMean(0), weight);
+          break;
+        case 3324:
+          _histMultiXi1530_0->fill(_histMultiXi1530_0->binMean(0), weight);
+          break;
+        case 5122:
+          _histMultiLambdaB0->fill(_histMultiLambdaB0->binMean(0), weight);
+          break;
+        }
+      }
+    }
+
+
+    /// Book all histograms against the HepData paper tables.
+    void init() {
+      _histPtTIn = bookHistogram1D(1, 1, 1);
+      _histPtTOut = bookHistogram1D(2, 1, 1);
+      _histPtSIn = bookHistogram1D(3, 1, 1);
+      _histPtSOut = bookHistogram1D(4, 1, 1);
+      
+      _histRapidityT = bookHistogram1D(5, 1, 1);
+      _histRapidityS = bookHistogram1D(6, 1, 1);
+      _histScaledMom = bookHistogram1D(7, 1, 1);
+      _histLogScaledMom = bookHistogram1D(8, 1, 1);
+      
+      _histPtTOutVsXp = bookProfile1D(9,  1, 1);
+      _histPtVsXp = bookProfile1D(10, 1, 1);    
+      
+      _hist1MinusT = bookHistogram1D(11, 1, 1);
+      _histTMajor = bookHistogram1D(12, 1, 1);
+      _histTMinor = bookHistogram1D(13, 1, 1);
+      _histOblateness = bookHistogram1D(14, 1, 1);
+      
+      _histSphericity = bookHistogram1D(15, 1, 1);
+      _histAplanarity = bookHistogram1D(16, 1, 1);
+      _histPlanarity = bookHistogram1D(17, 1, 1);
+      
+      _histCParam = bookHistogram1D(18, 1, 1);
+      _histDParam = bookHistogram1D(19, 1, 1);
+      
+      _histHemiMassH = bookHistogram1D(20, 1, 1);
+      _histHemiMassL = bookHistogram1D(21, 1, 1);
+      _histHemiMassD = bookHistogram1D(22, 1, 1);
+      
+      _histHemiBroadW = bookHistogram1D(23, 1, 1);
+      _histHemiBroadN = bookHistogram1D(24, 1, 1);
+      _histHemiBroadT = bookHistogram1D(25, 1, 1);
+      _histHemiBroadD = bookHistogram1D(26, 1, 1);
+
+      // Binned in y_cut
+      _histDiffRate2Durham = bookHistogram1D(27, 1, 1);
+      _histDiffRate2Jade = bookHistogram1D(28, 1, 1);
+      _histDiffRate3Durham = bookHistogram1D(29, 1, 1);
+      _histDiffRate3Jade = bookHistogram1D(30, 1, 1);
+      _histDiffRate4Durham = bookHistogram1D(31, 1, 1);
+      _histDiffRate4Jade = bookHistogram1D(32, 1, 1);
+
+      // Binned in cos(chi)
+      _histEEC = bookHistogram1D(33, 1, 1);
+      _histAEEC = bookHistogram1D(34, 1, 1);
+
+      _histMultiCharged = bookHistogram1D(35, 1, 1);
+
+      _histMultiPiPlus = bookHistogram1D(36, 1, 1);
+      _histMultiPi0 = bookHistogram1D(36, 1, 2);
+      _histMultiKPlus = bookHistogram1D(36, 1, 3);
+      _histMultiK0 = bookHistogram1D(36, 1, 4);
+      _histMultiEta = bookHistogram1D(36, 1, 5);
+      _histMultiEtaPrime = bookHistogram1D(36, 1, 6);
+      _histMultiDPlus = bookHistogram1D(36, 1, 7);
+      _histMultiD0 = bookHistogram1D(36, 1, 8);
+      _histMultiBPlus0 = bookHistogram1D(36, 1, 9);
+
+      _histMultiF0 = bookHistogram1D(37, 1, 1);
+
+      _histMultiRho = bookHistogram1D(38, 1, 1);
+      _histMultiKStar892Plus = bookHistogram1D(38, 1, 2);
+      _histMultiKStar892_0 = bookHistogram1D(38, 1, 3);
+      _histMultiPhi = bookHistogram1D(38, 1, 4);
+      _histMultiDStar2010Plus = bookHistogram1D(38, 1, 5);
+
+      _histMultiF2 = bookHistogram1D(39, 1, 1);
+      _histMultiK2Star1430_0 = bookHistogram1D(39, 1, 2);
+
+      _histMultiP = bookHistogram1D(40, 1, 1);
+      _histMultiLambda0 = bookHistogram1D(40, 1, 2);
+      _histMultiXiMinus = bookHistogram1D(40, 1, 3);
+      _histMultiOmegaMinus = bookHistogram1D(40, 1, 4);
+      _histMultiDeltaPlusPlus = bookHistogram1D(40, 1, 5);
+      _histMultiSigma1385Plus = bookHistogram1D(40, 1, 6);
+      _histMultiXi1530_0 = bookHistogram1D(40, 1, 7);
+      _histMultiLambdaB0 = bookHistogram1D(40, 1, 8);
+    }
+
+
+
+    /// Finalize: normalise/scale everything by the appropriate weight sums.
+    void finalize() { 
+      // Normalize inclusive single particle distributions to the average number 
+      // of charged particles per event.
+      const double avgNumParts = _weightedTotalPartNum / _passedCutWeightSum;
+
+      normalize(_histPtTIn, avgNumParts);
+      normalize(_histPtTOut, avgNumParts); 
+      normalize(_histPtSIn, avgNumParts);
+      normalize(_histPtSOut, avgNumParts); 
+
+      normalize(_histRapidityT, avgNumParts); 
+      normalize(_histRapidityS, avgNumParts); 
+
+      normalize(_histLogScaledMom, avgNumParts);
+      normalize(_histScaledMom, avgNumParts); 
+
+      // Per-event quantities: divide out the accepted-event weight sum.
+      scale(_histEEC, 1.0/_passedCutWeightSum);
+      scale(_histAEEC, 1.0/_passedCutWeightSum);
+      scale(_histMultiCharged, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiPiPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiPi0, 1.0/_passedCutWeightSum);
+      scale(_histMultiKPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiK0, 1.0/_passedCutWeightSum);
+      scale(_histMultiEta, 1.0/_passedCutWeightSum);
+      scale(_histMultiEtaPrime, 1.0/_passedCutWeightSum);
+      scale(_histMultiDPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiD0, 1.0/_passedCutWeightSum);
+      scale(_histMultiBPlus0, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiF0, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiRho, 1.0/_passedCutWeightSum);
+      scale(_histMultiKStar892Plus, 1.0/_passedCutWeightSum);
+      scale(_histMultiKStar892_0, 1.0/_passedCutWeightSum);
+      scale(_histMultiPhi, 1.0/_passedCutWeightSum);
+      scale(_histMultiDStar2010Plus, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiF2, 1.0/_passedCutWeightSum);
+      scale(_histMultiK2Star1430_0, 1.0/_passedCutWeightSum);
+
+      scale(_histMultiP, 1.0/_passedCutWeightSum);
+      scale(_histMultiLambda0, 1.0/_passedCutWeightSum);
+      scale(_histMultiXiMinus, 1.0/_passedCutWeightSum);
+      scale(_histMultiOmegaMinus, 1.0/_passedCutWeightSum);
+      scale(_histMultiDeltaPlusPlus, 1.0/_passedCutWeightSum);
+      scale(_histMultiSigma1385Plus, 1.0/_passedCutWeightSum);
+      scale(_histMultiXi1530_0, 1.0/_passedCutWeightSum);
+      scale(_histMultiLambdaB0, 1.0/_passedCutWeightSum);
+
+      // Event-shape distributions: normalise to unit area.
+      normalize(_hist1MinusT); 
+      normalize(_histTMajor); 
+      normalize(_histTMinor); 
+      normalize(_histOblateness); 
+
+      normalize(_histSphericity); 
+      normalize(_histAplanarity); 
+      normalize(_histPlanarity); 
+
+      normalize(_histHemiMassD); 
+      normalize(_histHemiMassH); 
+      normalize(_histHemiMassL); 
+
+      normalize(_histHemiBroadW); 
+      normalize(_histHemiBroadN); 
+      normalize(_histHemiBroadT); 
+      normalize(_histHemiBroadD); 
+
+      normalize(_histCParam); 
+      normalize(_histDParam); 
+
+      normalize(_histDiffRate2Durham); 
+      normalize(_histDiffRate2Jade); 
+      normalize(_histDiffRate3Durham);
+      normalize(_histDiffRate3Jade); 
+      normalize(_histDiffRate4Durham);
+      normalize(_histDiffRate4Jade); 
+    }
+
+    //@}
+
+
+  private:
+
+    /// Store the weighted sums of numbers of charged / charged+neutral
+    /// particles - used to calculate average number of particles for the 
+    /// inclusive single particle distributions' normalisations.
+    double _weightedTotalPartNum;
+
+    /// Sum of weights of events passing the multiplicity cut.
+    double _passedCutWeightSum;
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histPtTIn;
+    AIDA::IHistogram1D *_histPtTOut;
+    AIDA::IHistogram1D *_histPtSIn;
+    AIDA::IHistogram1D *_histPtSOut;
+
+    AIDA::IHistogram1D *_histRapidityT;
+    AIDA::IHistogram1D *_histRapidityS;
+
+    AIDA::IHistogram1D *_histScaledMom, *_histLogScaledMom;
+
+    AIDA::IProfile1D   *_histPtTOutVsXp, *_histPtVsXp;
+
+    AIDA::IHistogram1D *_hist1MinusT; 
+    AIDA::IHistogram1D *_histTMajor; 
+    AIDA::IHistogram1D *_histTMinor; 
+    AIDA::IHistogram1D *_histOblateness; 
+
+    AIDA::IHistogram1D *_histSphericity;
+    AIDA::IHistogram1D *_histAplanarity;
+    AIDA::IHistogram1D *_histPlanarity;
+
+    AIDA::IHistogram1D *_histCParam;
+    AIDA::IHistogram1D *_histDParam;
+
+    AIDA::IHistogram1D *_histHemiMassD;
+    AIDA::IHistogram1D *_histHemiMassH;
+    AIDA::IHistogram1D *_histHemiMassL;
+               
+    AIDA::IHistogram1D *_histHemiBroadW;
+    AIDA::IHistogram1D *_histHemiBroadN;
+    AIDA::IHistogram1D *_histHemiBroadT;
+    AIDA::IHistogram1D *_histHemiBroadD;
+
+    AIDA::IHistogram1D *_histDiffRate2Durham;
+    AIDA::IHistogram1D *_histDiffRate2Jade; 
+    AIDA::IHistogram1D *_histDiffRate3Durham;
+    AIDA::IHistogram1D *_histDiffRate3Jade;
+    AIDA::IHistogram1D *_histDiffRate4Durham;
+    AIDA::IHistogram1D *_histDiffRate4Jade;
+
+    AIDA::IHistogram1D *_histEEC, *_histAEEC;
+
+    AIDA::IHistogram1D *_histMultiCharged;
+
+    AIDA::IHistogram1D *_histMultiPiPlus;
+    AIDA::IHistogram1D *_histMultiPi0;
+    AIDA::IHistogram1D *_histMultiKPlus;
+    AIDA::IHistogram1D *_histMultiK0;
+    AIDA::IHistogram1D *_histMultiEta;
+    AIDA::IHistogram1D *_histMultiEtaPrime;
+    AIDA::IHistogram1D *_histMultiDPlus;
+    AIDA::IHistogram1D *_histMultiD0;
+    AIDA::IHistogram1D *_histMultiBPlus0;
+
+    AIDA::IHistogram1D *_histMultiF0;
+
+    AIDA::IHistogram1D *_histMultiRho;
+    AIDA::IHistogram1D *_histMultiKStar892Plus;
+    AIDA::IHistogram1D *_histMultiKStar892_0;
+    AIDA::IHistogram1D *_histMultiPhi;
+    AIDA::IHistogram1D *_histMultiDStar2010Plus;
+
+    AIDA::IHistogram1D *_histMultiF2;
+    AIDA::IHistogram1D *_histMultiK2Star1430_0;
+
+    AIDA::IHistogram1D *_histMultiP;
+    AIDA::IHistogram1D *_histMultiLambda0;
+    AIDA::IHistogram1D *_histMultiXiMinus;
+    AIDA::IHistogram1D *_histMultiOmegaMinus;
+    AIDA::IHistogram1D *_histMultiDeltaPlusPlus;
+    AIDA::IHistogram1D *_histMultiSigma1385Plus;
+    AIDA::IHistogram1D *_histMultiXi1530_0;
+    AIDA::IHistogram1D *_histMultiLambdaB0;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<DELPHI_1996_S3430090> plugin_DELPHI_1996_S3430090;
+
+}

Copied: trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc (from r1802, trunk/src/Analyses/LEP/DELPHI_2002_069_CONF_603.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/DELPHI_2002_069_CONF_603.cc)
@@ -0,0 +1,134 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/InitialQuarks.hh"
+
+
+/// @todo Use inline PID functions instead
+#define IS_PARTON_PDGID(id) ( abs(id) <= 100 && abs(id) != 22 && (abs(id) < 11 || abs(id) > 18) )
+#define IS_BHADRON_PDGID(id) ( ((abs(id)/100)%10 == 5) || (abs(id) >= 5000 && abs(id) <= 5999) )
+
+namespace Rivet {
+
+
+  /// @brief DELPHI b-fragmentation measurement
+  /// @author Hendrik Hoeth
+  class DELPHI_2002_069_CONF_603 : public Analysis {
+  public:
+
+    /// Constructor
+    DELPHI_2002_069_CONF_603() 
+      : Analysis("DELPHI_2002_069_CONF_603")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      addProjection(ChargedFinalState(), "FS");
+      addProjection(InitialQuarks(), "IQF");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed ncharged cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed ncharged cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      
+      // Loop over ALL particles in the generator record (not just the FS
+      // projection), so intermediate B hadrons are visible.
+      foreach (const GenParticle* p, particles(e.genEvent())) {
+        const GenVertex* pv = p->production_vertex();
+        const GenVertex* dv = p->end_vertex();
+        if (IS_BHADRON_PDGID(p->pdg_id())) {
+          // Scaled energy x_B = E / E_beam
+          const double xp = p->momentum().e()/meanBeamMom;
+          
+          // If the B-hadron has a parton as parent, call it primary B-hadron:
+          if (pv) {
+            bool is_primary = false;
+            for (GenVertex::particles_in_const_iterator pp = pv->particles_in_const_begin(); pp != pv->particles_in_const_end() ; ++pp) {
+              if (IS_PARTON_PDGID((*pp)->pdg_id())) is_primary = true;
+            }
+            if (is_primary) {
+              _histXbprim->fill(xp, weight);
+              _histMeanXbprim->fill(_histMeanXbprim->binMean(0), xp, weight);
+            }
+          }
+          
+          // If the B-hadron has no B-hadron as a child, it decayed weakly:
+          if (dv) {
+            bool is_weak = true;
+            for (GenVertex::particles_out_const_iterator pp = dv->particles_out_const_begin() ;
+                 pp != dv->particles_out_const_end() ; ++pp) {
+              if (IS_BHADRON_PDGID((*pp)->pdg_id())) {
+                is_weak = false;
+              }
+            }
+            if (is_weak) {
+              _histXbweak->fill(xp, weight);
+              _histMeanXbweak->fill(_histMeanXbweak->binMean(0), xp, weight);
+            }
+          }
+          
+        }
+      }
+    }
+      
+    
+    /// Book histograms      
+    /// NOTE(review): datasets 1, 2, 4 and 5 are booked; confirm d03 is
+    /// intentionally unused by this analysis.
+    void init() {
+      _histXbprim     = bookHistogram1D(1, 1, 1);
+      _histXbweak     = bookHistogram1D(2, 1, 1);
+      _histMeanXbprim = bookProfile1D(4, 1, 1);
+      _histMeanXbweak = bookProfile1D(5, 1, 1);
+    }
+    
+
+    // Finalize: normalise the x_B spectra to unit area.
+    void finalize() {
+      normalize(_histXbprim);
+      normalize(_histXbweak);
+    }
+
+
+  private:
+
+    /// @name Histograms: x_B spectra and their single-bin profile means
+    //@{
+
+    AIDA::IHistogram1D *_histXbprim;
+    AIDA::IHistogram1D *_histXbweak;
+
+    AIDA::IProfile1D *_histMeanXbprim;
+    AIDA::IProfile1D *_histMeanXbweak;
+
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<DELPHI_2002_069_CONF_603> plugin_DELPHI_2002_069_CONF_603;
+
+}

Copied: trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc (from r1802, trunk/src/Analyses/LEP/DELPHI_2003_WUD_03_11.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/DELPHI_2003_WUD_03_11.cc)
@@ -0,0 +1,204 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+
+  /**
+   * @brief DELPHI 4-jet angular distributions
+   * @author Hendrik Hoeth
+   *
+   * This is Hendrik Hoeth's Diploma thesis, measuring the 4-jet angular
+   * distributions at LEP-1.
+   *
+   *
+   * @par Run conditions
+   *
+   * @arg LEP1 beam energy: \f$ \sqrt{s} = 91.2 \f$ GeV
+   * @arg Run with generic QCD events.
+   * @arg No \f$ p_\perp^\text{min} \f$ cutoff is required
+   */
+  class DELPHI_2003_WUD_03_11 : public Analysis {
+  public:
+
+    /// Constructor
+    DELPHI_2003_WUD_03_11()
+      : Analysis("DELPHI_2003_WUD_03_11")
+    {
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+      addProjection(FastJets(cfs, FastJets::JADE, 0.7), "JadeJets");
+      addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
+      _numdurjets = 0;
+      _numjadejets = 0;
+    }
+    
+
+
+    /// @name Jet angle calculator functions
+    /// @todo These shouldn't be object methods, as they have no state!
+    //@{
+    
+    /// Cosine of the angle between the planes spanned by jet pairs (1,2) and (3,4)
+    /// (Bengtsson-Zerwas angle).
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils?
+    double calc_BZ(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p12 = cross( momentum3(jets[0]), momentum3(jets[1]));
+      Vector3 p34 = cross( momentum3(jets[2]), momentum3(jets[3]));
+      return dot(p12,p34) / (p12.mod()*p34.mod());
+    }
+
+
+    /// Koerner-Schierholz-Willrodt angle: cosine of the mean of the angles
+    /// between the (1,4)/(2,3) and (1,3)/(2,4) plane normals.
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils? 
+    double calc_KSW(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p13 = cross( momentum3(jets[0]), momentum3(jets[2]));
+      Vector3 p24 = cross( momentum3(jets[1]), momentum3(jets[3]));
+      Vector3 p14 = cross( momentum3(jets[0]), momentum3(jets[3]));
+      Vector3 p23 = cross( momentum3(jets[1]), momentum3(jets[2]));
+      return cos (0.5*( acos (dot(p14,p23) / (p14.mod()*p23.mod())) +
+                        acos (dot(p13,p24) / (p13.mod()*p24.mod())) ));
+    }
+    
+
+    /// Nachtmann-Reiter angle: cosine of the angle between the momentum
+    /// differences (p1-p2) and (p3-p4).
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils? 
+    double calc_NR(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p12 = momentum3(jets[0]) - momentum3(jets[1]);
+      Vector3 p34 = momentum3(jets[2]) - momentum3(jets[3]);
+      return dot(p12,p34) / (p12.mod()*p34.mod());
+    }
+
+    /// Cosine of the opening angle between the two lowest-energy jets (3,4).
+    /// @todo Use Jet or FourMomentum interface rather than PseudoJet
+    /// @todo Move to utils? 
+    double calc_ALPHA34(const vector<fastjet::PseudoJet>& jets) {
+      assert(jets.size() == 4);
+      Vector3 p3 = momentum3(jets[2]);
+      Vector3 p4 = momentum3(jets[3]);
+      return dot(p3,p4) / (p3.mod()*p4.mod());
+    }
+
+    //@}
+
+
+
+    /// @name Analysis methods
+    //@{
+
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed multiplicity cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      
+      // Jets: cluster at fixed y_cut and fill the angle histos only for
+      // exclusive 4-jet configurations. E-sorting fixes the jet ordering
+      // the calc_* functions rely on (jets[2],[3] = lowest energy).
+      const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
+      vector<fastjet::PseudoJet> jets_durham;
+      if (durjet.clusterSeq()) {
+        jets_durham = fastjet::sorted_by_E(durjet.clusterSeq()->exclusive_jets_ycut(0.008));
+        if (jets_durham.size() == 4) {
+          _histDurhamBZ->fill(fabs(calc_BZ(jets_durham)), weight);
+          _histDurhamKSW->fill(calc_KSW(jets_durham), weight);
+          _histDurhamNR->fill(fabs(calc_NR(jets_durham)), weight);
+          _histDurhamALPHA34->fill(calc_ALPHA34(jets_durham), weight);
+        }
+        // Unweighted count of events that are exactly 4-jet at this y_cut.
+        if (durjet.clusterSeq()->exclusive_ymerge(3) > 0.008 && 
+            durjet.clusterSeq()->exclusive_ymerge(4) < 0.008) {
+          _numdurjets++;
+        }
+      }
+      
+      const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
+      vector<fastjet::PseudoJet> jets_jade;
+      if (jadejet.clusterSeq()) {
+        jets_jade = fastjet::sorted_by_E(jadejet.clusterSeq()->exclusive_jets_ycut(0.015));
+        if (jets_jade.size() == 4) {
+          _histJadeBZ->fill(fabs(calc_BZ(jets_jade)), weight);
+          _histJadeKSW->fill(calc_KSW(jets_jade), weight);
+          _histJadeNR->fill(fabs(calc_NR(jets_jade)), weight);
+          _histJadeALPHA34->fill(calc_ALPHA34(jets_jade), weight);
+        }
+        if (jadejet.clusterSeq()->exclusive_ymerge(3) > 0.015 && 
+            jadejet.clusterSeq()->exclusive_ymerge(4) < 0.015) {
+          _numjadejets++;
+        }
+      }
+      
+    }
+    
+    
+    
+    /// Book the four angle histos for each clustering scheme.
+    void init() {
+      _histDurhamBZ      = bookHistogram1D(1, 1, 1);
+      _histDurhamKSW     = bookHistogram1D(2, 1, 1);
+      _histDurhamNR      = bookHistogram1D(3, 1, 1);
+      _histDurhamALPHA34 = bookHistogram1D(4, 1, 1);
+      _histJadeBZ        = bookHistogram1D(1, 2, 1);
+      _histJadeKSW       = bookHistogram1D(2, 2, 1);
+      _histJadeNR        = bookHistogram1D(3, 2, 1);
+      _histJadeALPHA34   = bookHistogram1D(4, 2, 1);
+    }
+    
+    
+    
+    // Finalize
+    void finalize() { 
+      // Normalise each angle distribution to a fixed constant per scheme
+      // (Durham: 0.0785, Jade: 0.0277) — presumably the measured 4-jet
+      // rates at the chosen y_cuts; confirm against the thesis.
+      
+      getLog() << Log::INFO << "Number of Durham jets = " << _numdurjets << endl;
+      getLog() << Log::INFO << "Number of Jade jets   = " << _numjadejets << endl;
+      normalize(_histDurhamBZ      , 0.0785);
+      normalize(_histDurhamKSW     , 0.0785);
+      normalize(_histDurhamNR      , 0.0785);
+      normalize(_histDurhamALPHA34 , 0.0785);
+      normalize(_histJadeBZ        , 0.0277);
+      normalize(_histJadeKSW       , 0.0277);
+      normalize(_histJadeNR        , 0.0277);
+      normalize(_histJadeALPHA34   , 0.0277);
+    }
+
+    //@}
+
+
+  private:
+
+    /// Unweighted counts of exclusive 4-jet events per clustering scheme.
+    int _numdurjets;
+    int _numjadejets;
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histDurhamBZ;
+    AIDA::IHistogram1D *_histDurhamKSW;
+    AIDA::IHistogram1D *_histDurhamNR;
+    AIDA::IHistogram1D *_histDurhamALPHA34;
+    AIDA::IHistogram1D *_histJadeBZ;
+    AIDA::IHistogram1D *_histJadeKSW;
+    AIDA::IHistogram1D *_histJadeNR;
+    AIDA::IHistogram1D *_histJadeALPHA34;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<DELPHI_2003_WUD_03_11> plugin_DELPHI_2003_WUD_03_11;
+
+}

Copied: trunk/src/Analyses/E735_1998_S3905616.cc (from r1802, trunk/src/Analyses/Misc/E735_1998_S3905616.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/E735_1998_S3905616.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Misc/E735_1998_S3905616.cc)
@@ -0,0 +1,63 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+
+  class E735_1998_S3905616 : public Analysis {
+  public:
+    
+    /// Constructor
+    E735_1998_S3905616()
+      : Analysis("E735_1998_S3905616") {
+      setBeams(PROTON, ANTIPROTON);
+      const ChargedFinalState cfs;
+      addProjection(cfs, "FS");
+    }
+    
+
+    /// @name Analysis methods
+    //@{
+    
+    void init() {
+      _hist_multiplicity = bookHistogram1D(1, 1, 1);
+    }
+
+
+    void analyze(const Event& event) {
+      const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Get the event weight
+      const double weight = event.weight();
+      
+      // Fill histo of charged multiplicity distribution
+      _hist_multiplicity->fill(numParticles, weight);
+    }
+    
+    
+    void finalize() {
+      normalize(_hist_multiplicity);
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_hist_multiplicity;
+    //@}
+    
+  };
+  
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<E735_1998_S3905616> plugin_E735_1998_S3905616;
+  
+}

Copied: trunk/src/Analyses/ExampleAnalysis.cc (from r1802, trunk/src/Analyses/Example/ExampleAnalysis.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/ExampleAnalysis.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Example/ExampleAnalysis.cc)
@@ -0,0 +1,129 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/Multiplicity.hh"
+#include "Rivet/Projections/Thrust.hh"
+#include "Rivet/Projections/Sphericity.hh"
+
+namespace Rivet {
+  
+  
+  /// @brief Just measures a few random things as an example.
+  class ExampleAnalysis : public Analysis {
+  public:
+    
+    /// Constructor
+    ExampleAnalysis()
+      : Analysis("EXAMPLE")
+    {
+      const FinalState cnfs(-4, 4, 2*GeV);
+      const ChargedFinalState cfs(-4, 4, 2*GeV);
+      addProjection(cnfs, "FS");
+      addProjection(cfs, "CFS");
+      addProjection(FastJets(cnfs, FastJets::KT, 0.7), "Jets");
+      addProjection(Multiplicity(cfs), "CMult");
+      addProjection(Multiplicity(cnfs), "CNMult");
+      addProjection(Thrust(cfs), "Thrust");
+      addProjection(Sphericity(cfs), "Sphericity");
+    }
+    
+
+    /// @name Analysis methods
+    //@{
+    
+    /// Book histograms
+    void init() {
+      // Using histogram auto-booking is preferable if there are comparison datasets in HepData.
+      // Since this is just a demo analysis, there is no associated paper!
+
+      _histTot         = bookHistogram1D("TotalMult", 100, -0.5, 99.5);
+      _histChTot       = bookHistogram1D("TotalChMult", 50, -1.0, 99.0);
+      _histHadrTot     = bookHistogram1D("HadrTotalMult", 100, -0.5, 99.5);
+      _histHadrChTot   = bookHistogram1D("HadrTotalChMult", 50, -1.0, 99.0);
+
+      double edges[11] = { 0.5, 0.6, 0.7, 0.80, 0.85, 0.9, 0.92, 0.94, 0.96, 0.98, 1.0 };
+      vector<double> vedges(edges, edges+11);
+      _histThrust      = bookHistogram1D("Thrust", vedges);
+      _histMajor       = bookHistogram1D("Major", 10, 0.0, 0.6);
+      _histSphericity  = bookHistogram1D("Sphericity", 10, 0.0, 0.8);
+      _histAplanarity  = bookHistogram1D("Aplanarity", 10, 0.0, 0.3);
+    }
+
+
+    /// Do the analysis
+    void analyze(const Event& event) {
+      // Analyse and print some info
+      const Multiplicity& cm = applyProjection<Multiplicity>(event, "CMult");
+      const Multiplicity& cnm = applyProjection<Multiplicity>(event, "CNMult");
+      getLog() << Log::DEBUG << "Total multiplicity = " << cnm.totalMultiplicity()  << endl;
+      getLog() << Log::DEBUG << "Total charged multiplicity = " << cm.totalMultiplicity()   << endl;
+      getLog() << Log::DEBUG << "Hadron multiplicity = " << cnm.hadronMultiplicity() << endl;
+      getLog() << Log::DEBUG << "Hadron charged multiplicity = " << cm.hadronMultiplicity()  << endl;
+      
+      const Thrust& t = applyProjection<Thrust>(event, "Thrust");
+      getLog() << Log::DEBUG << "Thrust = " << t.thrust() << endl;
+      
+      const Sphericity& s = applyProjection<Sphericity>(event, "Sphericity");
+      getLog() << Log::DEBUG << "Sphericity = " << s.sphericity() << endl;
+      getLog() << Log::DEBUG << "Aplanarity = " << s.aplanarity() << endl;
+      
+      size_t num_b_jets = 0;
+      const Jets jets = applyProjection<FastJets>(event, "Jets").jets();
+      foreach (const Jet& j, jets) {
+        if (j.containsBottom()) ++num_b_jets;
+      }
+      getLog() << Log::DEBUG << "#B-jets = " << num_b_jets << endl;
+      
+      // Fill histograms
+      const double weight = event.weight();
+      _histTot->fill(cnm.totalMultiplicity(), weight);
+      _histChTot->fill(cm.totalMultiplicity(), weight);
+      _histHadrTot->fill(cnm.hadronMultiplicity(), weight);
+      _histHadrChTot->fill(cm.hadronMultiplicity(), weight);
+      _histThrust->fill(t.thrust(), weight);
+      _histMajor->fill(t.thrustMajor(), weight);
+      _histSphericity->fill(s.sphericity(), weight);
+      _histAplanarity->fill(s.aplanarity(), weight);
+    }
+    
+    
+    /// Finalize
+    void finalize() { 
+      normalize(_histTot);
+      normalize(_histChTot);
+      normalize(_histHadrTot);
+      normalize(_histHadrChTot);
+      normalize(_histThrust);
+      normalize(_histMajor);
+      normalize(_histSphericity);
+      normalize(_histAplanarity);
+    }
+    //@}
+
+
+  private:
+    
+    //@{
+    /// Histograms
+    AIDA::IHistogram1D* _histTot;
+    AIDA::IHistogram1D* _histChTot;
+    AIDA::IHistogram1D* _histHadrTot;
+    AIDA::IHistogram1D* _histHadrChTot;
+    AIDA::IHistogram1D* _histThrust;
+    AIDA::IHistogram1D* _histMajor;
+    AIDA::IHistogram1D* _histSphericity;
+    AIDA::IHistogram1D* _histAplanarity;
+    //@}
+
+  };
+    
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<ExampleAnalysis> plugin_ExampleAnalysis;
+
+}

Copied: trunk/src/Analyses/ExampleTree.cc (from r1802, trunk/src/Analyses/Example/ExampleTree.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/ExampleTree.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Example/ExampleTree.cc)
@@ -0,0 +1,296 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedLeptons.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+// ROOT stuff
+#ifdef HAVE_ROOT
+#include "TTree.h"
+#include "TFile.h"
+#include "TString.h"
+#endif
+
+namespace Rivet {
+
+
+  /// @brief Book and fill a ROOT tree with simulated data.
+  ///
+  /// This does some things, e.g. access parton level information, which
+  /// are not recommended in Rivet analyses, since the information is 
+  /// unphysical and so cannot be compared to data, and also may be generator dependent.
+  /// 
+  class ExampleTree : public Analysis {
+  public:
+
+    #ifndef HAVE_ROOT
+    
+    ExampleTree() : Analysis("EXAMPLETREE") { }
+    void init() {
+      getLog() << Log::WARN << "Rivet was not compiled against ROOT. ExampleTree will do nothing." << endl;
+    }
+    void analyze(const Event& event) { }
+    void finalize() { }
+
+
+    #else
+
+
+    ExampleTree() 
+      : Analysis("EXAMPLETREE")
+    {
+      const FinalState fs(-4.0, 4.0, 0.0*GeV);
+      addProjection(fs, "FS");
+      addProjection(ChargedLeptons(fs), "ChLeptons");
+      addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets");
+      
+      /// Veto neutrinos, antineutrinos and LSP
+      VetoedFinalState vfs(fs);
+      vfs
+        .addVetoDetail(NU_E, 10.0*GeV, 50.0*GeV)
+        .addVetoPairId(NU_MU)
+        .addVetoPairId(NU_TAU)
+        .addVetoId(1000022); // LSP
+      addProjection(vfs, "VFS");
+      addProjection(TotalVisibleMomentum(vfs), "TotalVisMom");
+      
+      ZFinder zs(fs, ELECTRON, 80*GeV, 100*GeV, 0.2);
+      addProjection(zs, "Zs");
+    }
+    
+    
+    void init() {
+      // Choose cuts
+      _jet_pt_cut = 20*GeV;
+      _subj_pt_cut = 20*GeV;
+      _lepton_pt_cut = 20*GeV;
+      _store_partons = true;
+      
+      _treeFileName = "rivetTree.root";
+      
+      // Create a file for the Tree
+      _treeFile = new TFile(_treeFileName, "recreate");
+      
+      // Book the ntuple.
+      _rivetTree = new TTree("Rivet Tree", "Rivet Example Tree");
+      
+      // Event number 
+      _rivetTree->Branch("nevt", &_nevt, "nevt/I");
+      
+      // Vector bosons
+      _rivetTree->Branch("nvb", &_nvb, "nvb/I");
+      _rivetTree->Branch("vbtype", &_vbtype, "vbtype[nvb]/I");
+      _rivetTree->Branch("vbvec", &_vbvec, "vbvec[nvb][4]/F");
+      
+      _rivetTree->Branch("njet", &_njet, "njet/I");
+      _rivetTree->Branch("vjet", &_vjet, "vjet[njet][4]/F");
+      
+      _rivetTree->Branch("nsub", &_nsub, "nsub/I");
+      _rivetTree->Branch("sjet3", &_sjet3, "sjet3[nsub][4]/F");
+      _rivetTree->Branch("ysubsj", &_ysubsj, "ysubsj[nsub][4]/F");
+      
+      _rivetTree->Branch("nlep", &_nlep, "nlep/I");
+      _rivetTree->Branch("vlep", &_vlep, "vlep[nlep][4]/F");
+      _rivetTree->Branch("leptype", &_leptype, "leptype[nlep][3]/F");
+      
+      _rivetTree->Branch("npart", &_npart, "npart/I");
+      _rivetTree->Branch("ppart", &_ppart, "ppart[npart][4]/F");
+      _rivetTree->Branch("pid", &_pid, "pid[npart]/I");
+      _rivetTree->Branch("mo", &_mo, "mo[npart]/I");  // first mother.
+      
+      _rivetTree->Branch("esumr", &_esumr, "esumr[4]/F");
+    }
+    
+
+    // Do the analysis
+    void analyze(const Event& event) {
+      const GenEvent& ev = event.genEvent();
+      _nevt = ev.event_number();
+      
+      // Get the vector bosons
+      _nvb = 0;
+      const FinalState& zs = applyProjection<FinalState>(event, "Zs");
+      foreach (const Particle& p, zs.particles()) {
+        const FourMomentum p4 = p.momentum();
+        _vbvec[_nvb][0] = p4.E()/GeV;
+        _vbvec[_nvb][1] = p4.px()/GeV;
+        _vbvec[_nvb][2] = p4.py()/GeV;
+        _vbvec[_nvb][3] = p4.pz()/GeV;
+        _vbtype[_nvb]   = 1;
+        ++_nvb;
+      }
+      
+      // Get the partons. This is generator-dependent and should not be
+      // used in normal analyses.
+      _npart = 0;
+      if (_store_partons) {
+        for (GenEvent::particle_const_iterator pi = event.genEvent().particles_begin(); 
+             pi != event.genEvent().particles_end(); ++pi ) {
+          // Only include particles which are documentation line (status >1) 
+          // The result/meaning will be generator dependent.
+          if ( (*pi)->status() >= 2 ) {
+            const FourMomentum p4 = (*pi)->momentum();
+            _ppart[_npart][1] = p4.px();
+            _ppart[_npart][2] = p4.py();
+            _ppart[_npart][3] = p4.pz();
+            _ppart[_npart][0] = p4.E();
+            _pid[_npart] = (*pi)->pdg_id();
+            const GenVertex* vertex = (*pi)->production_vertex();
+            // Get the first mother
+            if (vertex) {
+              if (vertex->particles_in_size()>0) {
+                GenVertex::particles_in_const_iterator p1 = vertex->particles_in_const_begin();
+                _mo[_npart] = (*p1)->pdg_id();
+              } else {
+                _mo[_npart] = 0;
+              }
+            } else {
+              _mo[_npart] = 0;
+            }
+            getLog() << Log::DEBUG << _npart << ":" << _pid[_npart] << endl;
+            ++_npart;
+          }
+        }
+      }
+      
+      
+      // Get the jets in decreasing pT order.
+      const FastJets& jets = applyProjection<FastJets>(event, "Jets");
+      PseudoJets jetList = jets.pseudoJetsByPt();
+      _njet = 0;
+      _nsub = 0;
+      foreach (const fastjet::PseudoJet& j, jetList) {
+        if (j.perp() > _jet_pt_cut) {
+          _vjet[_njet][0] = j.e()/GeV;
+          _vjet[_njet][1] = j.px()/GeV;
+          _vjet[_njet][2] = j.py()/GeV;
+          _vjet[_njet][3] = j.pz()/GeV;
+          if (j.perp() > _subj_pt_cut) {
+            _sjet3[_nsub][0] = j.e()/GeV;
+            _sjet3[_nsub][1] = j.px()/GeV;
+            _sjet3[_nsub][2] = j.py()/GeV;
+            _sjet3[_nsub][3] = j.pz()/GeV;
+            const vector<double> ys = jets.ySubJet(j);
+            for (size_t i = 0; i < 4; ++i){
+              if (ys.size() > i) {
+                _ysubsj[_nsub][i] = ys.at(i);
+              } else {
+                _ysubsj[_nsub][i] = 0;
+              }
+            }
+            ++_nsub;	 
+          }
+          ++_njet;
+        }
+      }
+      
+      // Loop over leptons
+      _nlep = 0;
+      const ChargedLeptons& cl = applyProjection<ChargedLeptons>(event, "ChLeptons");
+      foreach (const Particle& p, cl.chargedLeptons()) {
+        const FourMomentum p4 = p.momentum();
+        if (p4.pT() > _lepton_pt_cut) {
+          _vlep[_nlep][0] = p4.E()/GeV;
+          _vlep[_nlep][1] = p4.px()/GeV;
+          _vlep[_nlep][2] = p4.py()/GeV;
+          _vlep[_nlep][3] = p4.pz()/GeV;
+          ++_nlep;
+        }
+      }
+      
+      // Missing Et/total energy
+      const TotalVisibleMomentum& tvm = applyProjection<TotalVisibleMomentum>(event, "TotalVisMom");
+      _esumr[0] = tvm.momentum().E()/GeV;
+      _esumr[1] = tvm.momentum().px()/GeV;
+      _esumr[2] = tvm.momentum().py()/GeV;
+      _esumr[3] = tvm.momentum().pz()/GeV;
+      
+      // Finally fill the tree
+      _rivetTree->Fill();
+    }
+    
+    
+    // Finalize
+    void finalize() { 
+      // Write the tree to file.
+      _rivetTree->Write();
+    }
+    
+    //@}
+
+
+  private:
+
+    /// The tree
+    TTree* _rivetTree;
+    
+    /// The file for the Tree
+    TFile* _treeFile;
+
+    /// The filename
+    TString _treeFileName;
+
+
+    /// @name The ntuple variables.
+    //@{
+    /// Event number
+    int _nevt;            
+
+    /// Number of W bosons
+    int _nvb;             
+    /// 4 momentum of W bosons.
+    float _vbvec[8][4];
+    /// Type (i.e. decay mode) of W bosons.
+    int _vbtype[8]; 
+
+    /// Number of jets
+    int _njet; 
+    /// Four momentum of the jets
+    float _vjet[50][4]; 
+
+    /// Number of jets for which the subjet analysis was performed.
+    int _nsub; 
+    /// Four vector of jets for which we found subjets.
+    float _sjet3[200][4];
+    /// y 1->2, 2->3, 3->4, 4->5 for the above jets.
+    float _ysubsj[200][4];
+
+    /// Number of leptons
+    int _nlep;
+    /// Lepton types
+    int _leptype[150][3];
+    float _vlep[150][4];
+
+    /// Number of partons
+    int _npart; 
+    float _ppart[4000][4];
+    int _pid[4000];
+    int _mo[4000];
+
+    /// Total visible momentum
+    float _esumr[4];
+    //@}
+
+    /// Minimum pt of jets which will go into the tree.
+    int _jet_pt_cut;
+
+    /// Minimum pt of jets which will have y evaluated and stored.
+    int _subj_pt_cut;
+
+    /// Minimum pt of charged leptons which will go into the tree.
+    int _lepton_pt_cut;
+
+    /// Store the partons or not?
+    bool _store_partons;
+
+    #endif
+
+  };
+
+  
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<ExampleTree> plugin_ExampleTree;
+
+}

Copied: trunk/src/Analyses/H1_1994_S2919893.cc (from r1802, trunk/src/Analyses/HERA/H1_1994_S2919893.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/H1_1994_S2919893.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/HERA/H1_1994_S2919893.cc)
@@ -0,0 +1,258 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh" 
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Math/Constants.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/DISKinematics.hh"
+
+namespace Rivet {
+
+  /// @brief H1 energy flow and charged particle spectra
+  /// @author Peter Richardson
+  /// Based on the equivalent HZTool analysis
+  class H1_1994_S2919893 : public Analysis {
+  public:
+
+    /// Constructor
+    H1_1994_S2919893()
+      : Analysis("H1_1994_S2919893")
+    {
+      setBeams(ELECTRON, PROTON);
+      addProjection(DISLepton(), "Lepton");
+      addProjection(DISKinematics(), "Kinematics");
+      addProjection(FinalState(), "FS");
+    }
+    
+
+
+    /// @name Analysis methods
+    //@{
+    
+    void analyze(const Event& event) {
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
+      const DISLepton& dl = applyProjection<DISLepton>(event,"Lepton");
+      
+      // Get the DIS kinematics
+      double x  = dk.x();
+      double w2 = dk.W2();
+      double w = sqrt(w2);
+
+      // Momentum of the scattered lepton
+      FourMomentum leptonMom = dl.out().momentum();
+      double ptel = pT(leptonMom);
+      double enel = leptonMom.E();
+      double thel = leptonMom.angle(dk.beamHadron().momentum())/degree;
+      
+      // Extract the particles other than the lepton
+      ParticleVector particles;
+      particles.reserve(fs.particles().size());
+      const GenParticle& dislepGP = dl.out().genParticle();
+      foreach (const Particle& p, fs.particles()) {
+        const GenParticle& loopGP = p.genParticle(); 
+        if (&loopGP == &dislepGP) continue;
+        particles.push_back(p);
+      }
+      
+      // Cut on the forward energy
+      double efwd = 0.0;
+      foreach (const Particle& p, particles) {
+        double th = p.momentum().angle(dk.beamHadron().momentum())/degree;
+        if (th > 4.4 && th < 15.) {
+          efwd += p.momentum().E();
+        }
+      }
+      
+      // Apply the cuts
+      // Lepton energy and angle, w2 and forward energy
+      getLog()<<Log::DEBUG<<"enel/GeV = "<<enel/GeV<<", thel = "<<thel<<", w2 = "<<w2<<", efwd/GeV = "<<efwd/GeV<<std::endl;
+      bool cut = enel/GeV > 14. && thel > 157. && thel < 172.5 && w2 >= 3000. && efwd/GeV > 0.5;
+      if (!cut) vetoEvent;
+      
+      // Weight of the event
+      const double weight = event.weight();
+      // weights for x<1e-3 and x>1e-3
+      if (x < 1e-3) {
+        _wEnergy.first  += weight;
+      } else {
+        _wEnergy.second += weight;
+      }
+      
+      // Boost to hadronic CM
+      const LorentzTransform hcmboost = dk.boostHCM();
+      // Loop over the particles
+      long ncharged(0);
+      for (size_t ip1 = 0; ip1 < particles.size(); ++ip1) {
+        const Particle& p = particles[ip1];
+        
+        double th = p.momentum().angle(dk.beamHadron().momentum()) / degree;
+        // Boost momentum to lab
+        const FourMomentum hcmMom = hcmboost.transform(p.momentum());
+        // Angular cut
+        if (th <= 4.4) continue;
+        
+        // Energy flow histogram
+        double et = fabs(Et(hcmMom));
+        double eta = -hcmMom.pseudorapidity(); 
+        if (x < 1e-3) {
+          _histEnergyFlowLowX ->fill(eta, et*weight);
+        } else {
+          _histEnergyFlowHighX->fill(eta, et*weight);
+        }
+        if (PID::threeCharge(p.pdgId()) != 0) {
+          /// @todo Use units in w comparisons... what are the units?
+          if (w > 50. && w <= 200.) {
+            double xf= -2 * hcmMom.z() / w;
+            double pt2 = pT2(hcmMom);
+            if (w > 50. && w <= 100.) {
+              _histSpectraW77 ->fill(xf, weight); 
+            } else if (w > 100. && w <= 150.) {
+              _histSpectraW122->fill(xf, weight);
+            } else if (w > 150. && w <= 200.) {
+              _histSpectraW169->fill(xf, weight);
+            }
+            _histSpectraW117->fill(xf, weight);
+            /// @todo Is this profile meant to be filled with 2 weight factors?
+            _histPT2->fill(xf, pt2*weight/GeV2, weight);
+            ++ncharged;
+          }
+        }
+
+
+        // Energy-energy correlation
+        if (th <= 8.) continue;
+        double phi1 = p.momentum().azimuthalAngle(ZERO_2PI);
+        double eta1 = p.momentum().pseudorapidity();
+        double et1 = fabs(Et(p.momentum()));
+        for (size_t ip2 = ip1+1; ip2 < particles.size(); ++ip2) {
+          const Particle& p2 = particles[ip2];
+
+          //double th2 = beamAngle(p2.momentum(), order);
+          double th2 = p2.momentum().angle(dk.beamHadron().momentum()) / degree;
+          if (th2 <= 8.) continue;
+          double phi2 = p2.momentum().azimuthalAngle(ZERO_2PI);
+
+          /// @todo Use angle function
+          double deltaphi = phi1 - phi2;
+          if (fabs(deltaphi) > PI) 
+            deltaphi = fabs(fabs(deltaphi) - TWOPI);
+          double eta2 = p2.momentum().pseudorapidity();
+          double omega = sqrt(sqr(eta1-eta2) + sqr(deltaphi));
+          double et2 = fabs(Et(p2.momentum()));
+          double wt = et1*et2 / sqr(ptel) * weight;
+          if(x < 1e-3) {
+            _histEECLowX ->fill(omega, wt);
+          } else {
+            _histEECHighX->fill(omega,wt);
+          }
+        }
+      }
+
+      // Factors for normalization
+      if (w > 50. && w <= 200.) {
+        if (w <= 100.) {
+          _w77.first  += ncharged*weight;
+          _w77.second += weight;
+        } else if (w <= 150.) {
+          _w122.first  += ncharged*weight;
+          _w122.second += weight;
+        } else {
+          _w169.first  += ncharged*weight;
+          _w169.second += weight;
+        }
+        _w117.first  += ncharged*weight;
+        _w117.second += weight;
+      }
+    }
+
+
+
+    void init() {
+      _w77  = make_pair(0.0, 0.0);
+      _w122 = make_pair(0.0, 0.0);
+      _w169 = make_pair(0.0, 0.0);
+      _w117 = make_pair(0.0, 0.0);
+      _wEnergy = make_pair(0.0, 0.0);
+
+      /// @todo What is "N"?
+      _histEnergyFlowLowX =  bookHistogram1D(1, 1, 1);
+      _histEnergyFlowHighX = bookHistogram1D(1, 1, 2);
+
+      _histEECLowX = bookHistogram1D(2, 1, 1);
+      _histEECHighX = bookHistogram1D(2, 1, 2);
+
+      /// @todo Add cross-section units to label
+      _histSpectraW77 = bookHistogram1D(3, 1, 1);
+      _histSpectraW122 = bookHistogram1D(3, 1, 2);
+      _histSpectraW169 = bookHistogram1D(3, 1, 3);
+      _histSpectraW117 = bookHistogram1D(3, 1, 4);
+
+      _histPT2 = bookProfile1D(4, 1, 1);
+    }
+
+
+    /// Finalize
+    void finalize() { 
+      // Normalize inclusive single particle distributions to the average number 
+      // of charged particles per event.
+      double avgNumParts = _w77.first/_w77.second;
+      normalize(_histSpectraW77, avgNumParts);
+
+      avgNumParts = _w122.first/_w122.second;
+      normalize(_histSpectraW122, avgNumParts);
+
+      avgNumParts = _w169.first/_w169.second;
+      normalize(_histSpectraW169, avgNumParts);
+
+      avgNumParts = _w117.first/_w117.second;
+      normalize(_histSpectraW117, avgNumParts);
+
+      scale(_histEnergyFlowLowX , 1./_wEnergy.first );
+      scale(_histEnergyFlowHighX, 1./_wEnergy.second);
+
+      scale(_histEECLowX , 1./_wEnergy.first );
+      scale(_histEECHighX, 1./_wEnergy.second); 
+    }
+
+
+    //@}
+
+
+  private:
+
+    /**
+     *  Polar angle with right direction of the beam
+     */
+    inline double beamAngle(const FourVector& v, const bool & order) {
+      double thel = v.polarAngle()/degree;
+      if(thel<0.) thel+=180.;
+      if(!order) thel = 180.-thel;
+      return thel;
+    }
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D *_histEnergyFlowLowX;
+    AIDA::IHistogram1D *_histEnergyFlowHighX;
+    AIDA::IHistogram1D *_histEECLowX;
+    AIDA::IHistogram1D *_histEECHighX;
+    AIDA::IHistogram1D *_histSpectraW77;
+    AIDA::IHistogram1D *_histSpectraW122;
+    AIDA::IHistogram1D *_histSpectraW169;
+    AIDA::IHistogram1D *_histSpectraW117;
+    AIDA::IProfile1D *_histPT2;
+    //@}
+
+    /// @name storage of weight to calculate averages for normalisation
+    //@{
+    pair<double,double> _w77,_w122,_w169,_w117,_wEnergy;
+    //@}
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<H1_1994_S2919893> plugin_H1_1994_S2919893;
+
+}

Copied: trunk/src/Analyses/H1_1995_S3167097.cc (from r1802, trunk/src/Analyses/HERA/H1_1995_S3167097.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/H1_1995_S3167097.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/HERA/H1_1995_S3167097.cc)
@@ -0,0 +1,156 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/FinalStateHCM.hh"
+#include "Rivet/Projections/CentralEtHCM.hh"
+
+namespace Rivet {
+
+
+  /// @brief Measures energy flow in DIS? To be checked!
+  /// @todo Check this analysis!
+  /// @author Leif Lonnblad
+  class H1_1995_S3167097 : public Analysis {
+  public:
+
+    /// Constructor
+    H1_1995_S3167097() 
+      : Analysis("H1_1995_S3167097")
+    { 
+      setBeams(ELECTRON, PROTON);
+      const DISKinematics& diskin = addProjection(DISKinematics(), "Kinematics");
+      const FinalStateHCM& fshcm = addProjection(FinalStateHCM(diskin), "FS");
+      addProjection(CentralEtHCM(fshcm), "Y1HCM");
+      //addCut("x", MORE_EQ, _xmin);
+      //addCut("x", LESS_EQ, _xmax);
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    void init() {
+      _hEtFlow = vector<AIDA::IHistogram1D *>(_nbin);
+      _hEtFlowStat = vector<AIDA::IHistogram1D *>(_nbin);
+      _nev = vector<double>(_nbin);
+      /// @todo Automate this sort of thing so that the analysis code is more readable.
+      for (size_t i = 0; i < _nbin; ++i) {
+        string istr(1, char('1' + i));
+        _hEtFlow[i] = bookHistogram1D(istr, _nb, _xmin, _xmax);
+        _hEtFlowStat[i] = bookHistogram1D(istr, _nb, _xmin, _xmax);
+      }
+      _hAvEt = bookHistogram1D("21tmp", _nbin, 1.0, 10.0);
+      _hAvX  = bookHistogram1D("22tmp", _nbin, 1.0, 10.0);
+      _hAvQ2 = bookHistogram1D("23tmp", _nbin, 1.0, 10.0);
+      _hN    = bookHistogram1D("24", _nbin, 1.0, 10.0);
+    }
+    
+    
+    /// Calculate the bin number from the DISKinematics projection  
+    int _getbin(const DISKinematics& dk) {
+      if ( dk.Q2() > 5.0*GeV2 && dk.Q2() <= 10.0*GeV2 ) {
+        if ( dk.x() > 0.0001 && dk.x() <= 0.0002 )
+          return 0;
+        else if ( dk.x() > 0.0002 && dk.x() <= 0.0005 && dk.Q2() > 6.0*GeV2 )
+          return 1;
+      }
+      else if ( dk.Q2() > 10.0*GeV2 && dk.Q2() <= 20.0*GeV2 ){
+        if ( dk.x() > 0.0002 && dk.x() <= 0.0005 )
+          return 2;
+        else if ( dk.x() > 0.0005 && dk.x() <= 0.0008 )
+          return 3;
+        else if ( dk.x() > 0.0008 && dk.x() <= 0.0015 )
+          return 4;
+        else if ( dk.x() > 0.0015 && dk.x() <= 0.0040 )
+          return 5;
+      }
+      else if ( dk.Q2() > 20.0*GeV2 && dk.Q2() <= 50.0*GeV2 ){
+        if ( dk.x() > 0.0005 && dk.x() <= 0.0014 )
+          return 6;
+        else if ( dk.x() > 0.0014 && dk.x() <= 0.0030 )
+          return 7;
+        else if ( dk.x() > 0.0030 && dk.x() <= 0.0100 )
+          return 8;
+      }
+      return -1;
+    }
+    
+    
+    void analyze(const Event& event) {
+      const FinalStateHCM& fs = applyProjection<FinalStateHCM>(event, "FS");
+      const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
+      const CentralEtHCM y1 = applyProjection<CentralEtHCM>(event, "Y1HCM");
+      
+      const int ibin = _getbin(dk);
+      if (ibin < 0) vetoEvent;
+      const double weight = event.weight();
+      
+      for (size_t i = 0, N = fs.particles().size(); i < N; ++i) {
+        const double rap = fs.particles()[i].momentum().rapidity();
+        const double et = fs.particles()[i].momentum().Et();
+        _hEtFlow[ibin]->fill(rap, weight * et/GeV);
+        _hEtFlowStat[ibin]->fill(rap, weight * et/GeV);
+      }
+      
+      _nev[ibin] += weight;
+      _hAvEt->fill(ibin + 1.5, weight * y1.sumEt()/GeV);
+      _hAvX->fill(ibin + 1.5, weight * dk.x());
+      _hAvQ2->fill(ibin + 1.5, weight * dk.Q2()/GeV2);
+      _hN->fill(ibin + 1.5, weight);
+    }
+    
+    
+    void finalize() {
+      for (size_t ibin = 0; ibin < _nbin; ++ibin) {
+        _hEtFlow[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
+        _hEtFlowStat[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
+      }
+      
+      /// @todo Automate this sort of thing so that the analysis code is more readable.
+      AIDA::IDataPointSet* h = 0;
+      h = histogramFactory().divide("/H1_1995_S3167097/21", *_hAvEt, *_hN);
+      h->setTitle(_hAvEt->title());
+      histogramFactory().destroy(_hAvEt);
+      
+      h = histogramFactory().divide("/H1_1995_S3167097/22", *_hAvX, *_hN);
+      h->setTitle(_hAvX->title());
+      histogramFactory().destroy(_hAvX);
+      
+      h = histogramFactory().divide("/H1_1995_S3167097/23", *_hAvQ2, *_hN);
+      h->setTitle(_hAvQ2->title());
+      histogramFactory().destroy(_hAvQ2);
+    }
+    
+    //@}
+
+    
+  private:
+
+    /// Some integer constants used.
+    /// @todo Remove statics!
+    static const size_t _nb = 24, _nbin = 9;
+    
+    /// Some double constants used.
+    /// @todo Remove statics!
+    static const double _xmin, _xmax;
+
+    /// Histograms for the \f$ E_T \f$ flows
+    vector<AIDA::IHistogram1D*> _hEtFlow, _hEtFlowStat;
+
+    /// Histograms for averages in different kinematical bins.
+    AIDA::IHistogram1D *_hAvEt, *_hAvX, *_hAvQ2, *_hN;
+
+    /// Helper vector;
+    vector<double> _nev;
+  };
+
+
+  // Init statics
+  const double H1_1995_S3167097::_xmin = -6.0;
+  const double H1_1995_S3167097::_xmax = 6.0;
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<H1_1995_S3167097> plugin_H1_1995_S3167097;
+
+}

Copied: trunk/src/Analyses/H1_2000_S4129130.cc (from r1802, trunk/src/Analyses/HERA/H1_2000_S4129130.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/H1_2000_S4129130.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/HERA/H1_2000_S4129130.cc)
@@ -0,0 +1,305 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh" 
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Math/Constants.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/DISKinematics.hh"
+
+namespace Rivet {
+
+
+  /// @brief H1 energy flow and charged particle spectra
+  /// @author Peter Richardson
+  /// Based on the HZtool analysis
+  class H1_2000_S4129130 : public Analysis {
+  public:
+
+    /// Constructor: declare beams and the DIS projections used in analyze().
+    H1_2000_S4129130()
+      : Analysis("H1_2000_S4129130")
+    {
+      setBeams(ELECTRON, PROTON);
+      addProjection(DISLepton(), "Lepton");
+      addProjection(DISKinematics(), "Kinematics");
+      addProjection(FinalState(), "FS");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Select events in four (overlapping) kinematic regions, bin them in
+    /// (Q^2, x), and accumulate transverse energy flows vs. pseudorapidity.
+    void analyze(const Event& event) {
+      // Get the projections
+      const FinalState & fs = applyProjection<FinalState>(event, "FS");
+      const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
+      const DISLepton    & dl = applyProjection<DISLepton>(event,"Lepton");
+
+      // Get the DIS kinematics
+      double q2  = dk.Q2();
+      double x   = dk.x();
+      double y   = dk.y();
+      double w2  = dk.W2();
+      
+      // Momentum of the scattered lepton
+      FourMomentum leptonMom = dl.out().momentum();
+      // Scattered-lepton energy and polar angle w.r.t. the incoming lepton
+      double enel = leptonMom.E();
+      double thel = 180.-leptonMom.angle(dl.in().momentum())/degree;
+
+      // Extract the particles other than the lepton
+      ParticleVector particles;
+      particles.reserve(fs.particles().size());
+      const GenParticle& dislepGP = dl.out().genParticle();
+      for (ParticleVector::const_iterator p = fs.particles().begin();
+           p != fs.particles().end(); ++p) {
+        // Compare GenParticle addresses to skip the scattered DIS lepton
+        const GenParticle& loopGP = p->genParticle(); 
+        if (&loopGP == &dislepGP) continue;
+        particles.push_back(*p);
+      }
+      
+      // Cut on the forward energy (4.4 < theta < 15 degrees)
+      double efwd = 0.;
+      foreach (const Particle& p, particles) {
+        double th = 180.-p.momentum().angle(dl.in().momentum())/degree;
+        //      double th = beamAngle(p.momentum(),order);
+        if (th > 4.4 && th < 15.0) efwd += p.momentum().E();
+      }
+      // There are four possible selections for events
+      bool evcut[4];
+      // Low  Q2 selection a
+      /// @todo Units and inRange
+      evcut[0] = enel/GeV > 12. && w2 >= 4400. && efwd/GeV > 0.5 && 
+        thel > 157. && thel < 176.0;
+      // Low  Q2 selection b
+      /// @todo Units and inRange
+      evcut[1] = enel/GeV > 12. && y > 0.3 && y < 0.5;
+      // High Q2 selection a
+      /// @todo Units and inRange
+      evcut[2] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 && 
+        w2 >= 4400. && efwd > 0.5;
+      // High Q2 selection b
+      /// @todo Units and inRange
+      evcut[3] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
+        w2 > 27110. && w2 < 45182.;
+      
+      // Veto if fails all cuts
+      if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) {
+        vetoEvent;
+      }
+      
+      // Find the bins (-1 means "outside this selection's binning")
+      int bin[4] = {-1,-1,-1,-1};
+      // For the low Q2 selection a)
+      /// @todo Units
+      if (q2 > 2.5 && q2 <= 5.) {
+        if (x > 0.00005 && x <= 0.0001 ) bin[0] = 0;
+        if (x > 0.0001  && x <= 0.0002 ) bin[0] = 1;
+        if (x > 0.0002  && x <= 0.00035) bin[0] = 2;
+        if (x > 0.00035 && x <= 0.0010 ) bin[0] = 3;
+      }
+      /// @todo Units
+      else if(q2 > 5. && q2 <= 10.) {
+        if (x > 0.0001  && x <= 0.0002 ) bin[0] = 4;
+        if (x > 0.0002  && x <= 0.00035) bin[0] = 5;
+        if (x > 0.00035 && x <= 0.0007 ) bin[0] = 6;
+        if (x > 0.0007  && x <= 0.0020 ) bin[0] = 7;
+      }
+      /// @todo Units
+      else if(q2 > 10. && q2 <= 20.) {
+        if (x > 0.0002 && x <= 0.0005) bin[0] = 8;
+        if (x > 0.0005 && x <= 0.0008) bin[0] = 9;
+        if (x > 0.0008 && x <= 0.0015) bin[0] = 10;
+        if (x > 0.0015 && x <= 0.040 ) bin[0] = 11;
+      }
+      /// @todo Units
+      else if(q2 > 20. && q2 <= 50.) {
+        if (x > 0.0005 && x <= 0.0014) bin[0] = 12;
+        if (x > 0.0014 && x <= 0.0030) bin[0] = 13;
+        if (x > 0.0030 && x <= 0.0100) bin[0] = 14;
+      }
+      /// @todo Units
+      else if (q2 > 50. && q2 <= 100.) {
+        if (x >0.0008 && x <= 0.0030) bin[0] = 15;
+        if (x >0.0030 && x <= 0.0200) bin[0] = 16;
+      }
+      // Selection only stays active if a valid bin was found
+      evcut[0] &= bin[0] >= 0;
+      // For the low Q2 selection b)
+      if (q2 > 2.5 && q2 <= 5.  ) bin[1] = 0;
+      if (q2 > 5.  && q2 <= 10. ) bin[1] = 1;
+      if (q2 > 10. && q2 <= 20. ) bin[1] = 2;
+      if (q2 > 20. && q2 <= 50. ) bin[1] = 3;
+      if (q2 > 50. && q2 <= 100.) bin[1] = 4;
+      evcut[1] &= bin[1] >= 0;
+      // for the high Q2 selection a)
+      /// @todo Units
+      if (q2 > 100. && q2 <= 400.) {
+        if (x > 0.00251 && x <= 0.00631) bin[2] = 0;
+        if (x > 0.00631 && x <= 0.0158 ) bin[2] = 1;
+        if (x > 0.0158  && x <= 0.0398 ) bin[2] = 2;
+      }
+      /// @todo Units
+      else if (q2 > 400 && q2 <= 1100.) {
+        if (x > 0.00631 && x <= 0.0158 ) bin[2] = 3;
+        if (x > 0.0158  && x <= 0.0398 ) bin[2] = 4;
+        if (x > 0.0398  && x <= 1.     ) bin[2] = 5;
+      }
+      /// @todo Units
+      else if (q2 > 1100. && q2 <= 100000.) {
+        if (x > 0. && x <= 1.) bin[2] = 6;
+      }
+      evcut[2] &= bin[2] >= 0;
+      // for the high Q2 selection b)
+      /// @todo Units
+      if      (q2 > 100. && q2 <= 220.) bin[3] = 0;
+      else if (q2 > 220. && q2 <= 400.) bin[3] = 1;
+      else if (q2 > 400.              ) bin[3] = 2;
+      evcut[3] &= bin[3] >= 0;
+      
+      // Veto if fails all cuts after bin selection
+      // (was a no-op "if (...);" empty statement: the veto never fired)
+      if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) vetoEvent;
+      
+      // Increment the count for normalisation
+      const double weight = event.weight();
+      if (evcut[0]) _weightETLowQa [bin[0]] += weight;
+      if (evcut[1]) _weightETLowQb [bin[1]] += weight;
+      if (evcut[2]) _weightETHighQa[bin[2]] += weight;
+      if (evcut[3]) _weightETHighQb[bin[3]] += weight;
+      
+      // Boost to hadronicCM
+      const LorentzTransform hcmboost = dk.boostHCM();
+      
+      // Loop over the particles
+      double etcent = 0;
+      double etfrag = 0;
+      foreach (const Particle& p, particles) {
+        // Boost momentum to CMS
+        const FourMomentum hcmMom = hcmboost.transform(p.momentum());
+        double et = fabs(Et(hcmMom));
+        double eta = -hcmMom.pseudorapidity();
+        // Averages in central and forward region
+        if (fabs(eta) < .5 ) etcent += et;
+        if (eta > 2 && eta <= 3.) etfrag += et;
+        // Histograms of Et flow
+        if (evcut[0]) _histETLowQa [bin[0]]->fill(eta, et*weight);
+        if (evcut[1]) _histETLowQb [bin[1]]->fill(eta, et*weight);
+        if (evcut[2]) _histETHighQa[bin[2]]->fill(eta, et*weight);
+        if (evcut[3]) _histETHighQb[bin[3]]->fill(eta, et*weight);
+      }
+      // Fill histograms for the average quantities
+      if (evcut[1] || evcut[3]) {
+        _histAverETCentral->fill(q2, etcent*weight,weight);
+        _histAverETFrag   ->fill(q2, etfrag*weight,weight);
+      }
+    }
+    
+    
+    /// Book the fixed set of E_T flow histograms and per-bin weight counters.
+    void init() {
+      
+      string t = "Transverse energy flow for ";
+      IHistogram1D* h = 0;
+      
+      /// @todo What is "N"?
+      
+      const string xt = "\\langle x \\rangle";
+      const string Q2t = "\\langle Q^2 \\rangle";
+      
+      // Histograms and weight vectors for low Q^2 a
+      _histETLowQa.reserve(17);
+      _weightETLowQa.reserve(17);
+      for (size_t ix = 0; ix < 17; ++ix) {
+        h = bookHistogram1D(ix+1, 1, 1);
+        _histETLowQa.push_back(h);
+        _weightETLowQa.push_back(0.);
+      }
+      
+      // Histograms and weight vectors for high Q^2 a
+      _histETHighQa.reserve(7);
+      _weightETHighQa.reserve(7);
+      for (size_t ix = 0; ix < 7; ++ix) {
+        h = bookHistogram1D(ix+18, 1, 1);
+        _histETHighQa.push_back(h);
+        _weightETHighQa.push_back(0.);
+      }
+      
+      // Histograms and weight vectors for low Q^2 b
+      _histETLowQb.reserve(5);
+      _weightETLowQb.reserve(5);
+      for (size_t ix = 0; ix < 5; ++ix) {
+        h = bookHistogram1D(ix+25, 1, 1);
+        _histETLowQb.push_back(h);
+        _weightETLowQb.push_back(0.);
+      }
+      
+      // Histograms and weight vectors for high Q^2 b
+      _histETHighQb.reserve(3);
+      _weightETHighQb.reserve(3);
+      for (size_t ix = 0; ix < 3; ++ix) {
+        h = bookHistogram1D(30+ix, 1, 1);
+        _histETHighQb.push_back(h);
+        _weightETHighQb.push_back(0.0);
+      }
+      
+      // Histograms for the averages
+      _histAverETCentral = bookProfile1D(33,  1, 1);
+      _histAverETFrag = bookProfile1D(34,  1, 1);
+    }
+    
+    
+    // Finalize
+    void finalize() { 
+      // Normalise each Et distribution to the summed event weight in its
+      // kinematic bin. Guard against empty bins to avoid division by zero.
+      for (size_t ix=0; ix<17; ++ix) {
+        if (_weightETLowQa[ix] > 0.) scale(_histETLowQa[ix], 1./_weightETLowQa[ix]);
+      }
+      for(size_t ix=0; ix<7; ++ix) {
+        if (_weightETHighQa[ix] > 0.) scale(_histETHighQa[ix], 1./_weightETHighQa[ix]);
+      }
+      for(size_t ix=0; ix<5; ++ix) {
+        if (_weightETLowQb[ix] > 0.) scale(_histETLowQb[ix], 1./_weightETLowQb[ix]);
+      }
+      for(size_t ix=0; ix<3; ++ix) {
+        if (_weightETHighQb[ix] > 0.) scale(_histETHighQb[ix], 1./_weightETHighQb[ix]);
+      }
+    }
+    
+
+    //@}
+
+
+  private:
+    
+    /// Polar angle with right direction of the beam
+    /// (currently only referenced from commented-out code above)
+    inline double beamAngle(const FourVector& v, const bool & order) {
+      double thel = v.polarAngle()/degree;
+      if(thel<0.) thel+=180.;
+      if(!order) thel = 180.-thel;
+      return thel;
+    }
+
+    /// @name Histograms
+    //@{
+    vector<AIDA::IHistogram1D *> _histETLowQa;
+    vector<AIDA::IHistogram1D *> _histETHighQa;
+    vector<AIDA::IHistogram1D *> _histETLowQb;
+    vector<AIDA::IHistogram1D *> _histETHighQb;
+    AIDA::IProfile1D * _histAverETCentral;
+    AIDA::IProfile1D * _histAverETFrag;
+    //@}
+
+    /// @name storage of weights for normalisation
+    //@{
+    vector<double> _weightETLowQa;
+    vector<double> _weightETHighQa;
+    vector<double> _weightETLowQb;
+    vector<double> _weightETHighQb;
+    //@}
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<H1_2000_S4129130> plugin_H1_2000_S4129130;
+
+}

Copied: trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc (from r1802, trunk/src/Analyses/Misc/JADE_OPAL_2000_S4300807.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Misc/JADE_OPAL_2000_S4300807.cc)
@@ -0,0 +1,299 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/Beam.hh"
+
+namespace Rivet {
+
+
+  /**
+   * @brief Jet rates in e+e- at OPAL and JADE
+   * @author Frank Siegert
+   *
+   * @par Run conditions
+   *
+   * @arg LEP1 beam energy: \f$ \sqrt{s} \f$ = 91.2 GeV
+   * @arg Run with generic QCD events.
+   */
+  class JADE_OPAL_2000_S4300807 : public Analysis {
+
+  public:
+
+    /// @name Constructors etc.
+    //@{
+
+    /// Constructor: takes the CMS energy (string and value) and the
+    /// HepData table numbers for the R_Jade, R_Durham and y_Durham plots.
+    JADE_OPAL_2000_S4300807(const string& sqrtsstr, double sqrts, 
+                            int nr_R_Jade, int nr_R_Durham, int nr_y_Durham)
+      : Analysis("JADE_OPAL_2000_S4300807" + ("_" + sqrtsstr + "GEV")),
+        _sqrts(sqrts), 
+        _nr_R_Jade(nr_R_Jade),
+        _nr_R_Durham(nr_R_Durham), 
+        _nr_y_Durham(nr_y_Durham)
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      const FinalState fs;
+      addProjection(fs, "FS");
+      addProjection(FastJets(fs, FastJets::JADE, 0.7), "JadeJets");
+      addProjection(FastJets(fs, FastJets::DURHAM, 0.7), "DurhamJets");
+    }
+    
+    //@}
+
+
+    /// @name Publication metadata
+    //@{
+    /// Collider on which the experiment ran, inferred from the CMS energy.
+    string collider() const {
+      if (_sqrts < 90.0) {
+        return "DESY PETRA";
+      } else if (inRange(_sqrts, 90.0, 92.0)) {
+        return "LEP Run I";
+      } else {
+        return "LEP Run 2";
+      }
+    }
+    //@}
+
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Book five R_n data point sets per scheme and four y_Durham histograms.
+    void init() {
+      for (size_t i=0; i<5; ++i) {
+        _h_R_Jade[i]=bookDataPointSet(_nr_R_Jade, 1, i+1);
+        _h_R_Durham[i]=bookDataPointSet(_nr_R_Durham, 1, i+1);
+        if (i<4)_h_y_Durham[i]=bookHistogram1D(_nr_y_Durham, 1, i+1);
+      }
+    }
+
+
+
+    void analyze(const Event& e) {
+      
+      // Are we running with a compatible CMS energy?
+      const double sbeams = applyProjection<Beam>(e, "Beams").sqrtS();
+      if (fabs(sbeams - _sqrts)/GeV > 0.5) {
+        getLog() << Log::ERROR 
+                 << "CMS energy of events sqrt(s) = " << sbeams
+                 <<" doesn't match analysis energy sqrt(s) = " << _sqrts << endl;
+        /// @todo Really call exit()? I don't like the break of "command chain" that this implies
+        exit(1);
+      }
+      
+      // Jets
+      getLog() << Log::DEBUG << "Using FastJet JADE patch to make diff jet rate plots:" << endl;
+      const double weight = e.weight();
+      
+      // JADE scheme: y_{n,n+1} are the merge scales from n+1 to n jets.
+      // Each R_n data point at y-cut "ycut" is incremented when the event
+      // has exactly n jets at that cut, i.e. when ycut lies between the
+      // neighbouring merge scales.
+      const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
+      if (jadejet.clusterSeq()) {
+        double y_23 = jadejet.clusterSeq()->exclusive_ymerge(2);
+        double y_34 = jadejet.clusterSeq()->exclusive_ymerge(3);
+        double y_45 = jadejet.clusterSeq()->exclusive_ymerge(4);
+        double y_56 = jadejet.clusterSeq()->exclusive_ymerge(5);
+        
+        // 2-jet rate: no merge scale above the cut
+        for (int i = 0; i < _h_R_Jade[0]->size(); ++i) {
+          IDataPoint* dp = _h_R_Jade[0]->point(i);
+          if (y_23 < dp->coordinate(0)->value()) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        // 3-jet rate: cut between y_34 and y_23
+        for (int i = 0; i < _h_R_Jade[1]->size(); ++i) {
+          IDataPoint* dp = _h_R_Jade[1]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_34 < ycut && y_23 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        // 4-jet rate: cut between y_45 and y_34
+        for (int i = 0; i < _h_R_Jade[2]->size(); ++i) {
+          IDataPoint* dp = _h_R_Jade[2]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_45 < ycut && y_34 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        // 5-jet rate: cut between y_56 and y_45
+        for (int i = 0; i < _h_R_Jade[3]->size(); ++i) {
+          IDataPoint* dp = _h_R_Jade[3]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_56 < ycut && y_45 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        // >=6-jet rate: y_56 still above the cut
+        for (int i = 0; i < _h_R_Jade[4]->size(); ++i) {
+          IDataPoint* dp = _h_R_Jade[4]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_56 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+      }
+      
+      // Durham scheme: same logic, plus differential y_{n,n+1} histograms.
+      const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
+      if (durjet.clusterSeq()) {
+        double y_23 = durjet.clusterSeq()->exclusive_ymerge(2);
+        double y_34 = durjet.clusterSeq()->exclusive_ymerge(3);
+        double y_45 = durjet.clusterSeq()->exclusive_ymerge(4);
+        double y_56 = durjet.clusterSeq()->exclusive_ymerge(5);
+        
+        _h_y_Durham[0]->fill(y_23, weight);
+        _h_y_Durham[1]->fill(y_34, weight);
+        _h_y_Durham[2]->fill(y_45, weight);
+        _h_y_Durham[3]->fill(y_56, weight);
+        
+        for (int i = 0; i < _h_R_Durham[0]->size(); ++i) {
+          IDataPoint* dp = _h_R_Durham[0]->point(i);
+          if (y_23 < dp->coordinate(0)->value()) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        for (int i = 0; i < _h_R_Durham[1]->size(); ++i) {
+          IDataPoint* dp = _h_R_Durham[1]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_34 < ycut && y_23 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        for (int i = 0; i < _h_R_Durham[2]->size(); ++i) {
+          IDataPoint* dp = _h_R_Durham[2]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_45 < ycut && y_34 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        for (int i = 0; i < _h_R_Durham[3]->size(); ++i) {
+          IDataPoint* dp = _h_R_Durham[3]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_56 < ycut && y_45 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+        for (int i = 0; i < _h_R_Durham[4]->size(); ++i) {
+          IDataPoint* dp = _h_R_Durham[4]->point(i);
+          double ycut = dp->coordinate(0)->value();
+          if (y_56 > ycut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight);
+          }
+        }
+      }
+    }
+
+
+
+    /// Finalize
+    void finalize() {
+      // Normalise the differential y distributions to unit event weight
+      for (size_t n = 0; n < 4; ++n) {
+        scale(_h_y_Durham[n], 1.0/sumOfWeights());
+      }
+      
+      for (size_t n = 0; n < 5; ++n) {
+        /// scale integrated jet rates to 100%
+        for (int i = 0; i < _h_R_Jade[n]->size(); ++i) {
+          IDataPoint* dp = _h_R_Jade[n]->point(i);
+          dp->coordinate(1)->setValue(dp->coordinate(1)->value()*100.0/sumOfWeights());
+        }
+        for (int i = 0; i < _h_R_Durham[n]->size(); ++i) {
+          IDataPoint* dp = _h_R_Durham[n]->point(i);
+          dp->coordinate(1)->setValue(dp->coordinate(1)->value()*100.0/sumOfWeights());
+        }
+      }
+    }
+    
+    //@}
+    
+    
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IDataPointSet *_h_R_Jade[5];
+    AIDA::IDataPointSet *_h_R_Durham[5];
+    AIDA::IHistogram1D *_h_y_Durham[4];
+    //@}
+
+    double _sqrts;                             // analysis CMS energy in GeV
+    int _nr_R_Jade, _nr_R_Durham, _nr_y_Durham; // HepData table numbers
+
+  };
+
+
+
+  //////////////////////////////////////////////////////////////
+
+
+
+  // Concrete analyses: one per CMS energy point. Each binds the base
+  // analysis to the HepData table numbers (R_Jade, R_Durham, y_Durham)
+  // for that energy.
+  class JADE_OPAL_2000_S4300807_35GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_35GEV() : JADE_OPAL_2000_S4300807("35", 35.0, 7, 16, 24) {}
+    string summary() const { return "Jet rates in e+e- at JADE [35 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_35GEV> plugin_JADE_OPAL_2000_S4300807_35GEV;
+
+
+  class JADE_OPAL_2000_S4300807_44GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_44GEV() : JADE_OPAL_2000_S4300807("44", 44.0, 8, 17, 25) {}
+    string summary() const { return "Jet rates in e+e- at JADE [44 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_44GEV> plugin_JADE_OPAL_2000_S4300807_44GEV;
+
+
+  class JADE_OPAL_2000_S4300807_91GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_91GEV() : JADE_OPAL_2000_S4300807("91", 91.2, 9, 18, 26) {}
+    string summary() const { return "Jet rates in e+e- at OPAL [91 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_91GEV> plugin_JADE_OPAL_2000_S4300807_91GEV;
+
+
+  class JADE_OPAL_2000_S4300807_133GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_133GEV() : JADE_OPAL_2000_S4300807("133", 133.0, 10, 19, 27) {}
+    string summary() const { return "Jet rates in e+e- at OPAL [133 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_133GEV> plugin_JADE_OPAL_2000_S4300807_133GEV;
+
+
+  class JADE_OPAL_2000_S4300807_161GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_161GEV() : JADE_OPAL_2000_S4300807("161", 161.0, 11, 20, 28) {}
+    string summary() const { return "Jet rates in e+e- at OPAL [161 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_161GEV> plugin_JADE_OPAL_2000_S4300807_161GEV;
+
+
+  class JADE_OPAL_2000_S4300807_172GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_172GEV() : JADE_OPAL_2000_S4300807("172", 172.0, 12, 21, 29) {}
+    string summary() const { return "Jet rates in e+e- at OPAL [172 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_172GEV> plugin_JADE_OPAL_2000_S4300807_172GEV;
+
+
+  class JADE_OPAL_2000_S4300807_183GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_183GEV() : JADE_OPAL_2000_S4300807("183", 183.0, 13, 22, 30) {}
+    string summary() const { return "Jet rates in e+e- at OPAL [183 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_183GEV> plugin_JADE_OPAL_2000_S4300807_183GEV;
+
+
+  class JADE_OPAL_2000_S4300807_189GEV : public JADE_OPAL_2000_S4300807 {
+  public:
+    JADE_OPAL_2000_S4300807_189GEV() : JADE_OPAL_2000_S4300807("189", 189.0, 14, 23, 31) {}
+    string summary() const { return "Jet rates in e+e- at OPAL [189 GeV]."; }
+  };
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<JADE_OPAL_2000_S4300807_189GEV> plugin_JADE_OPAL_2000_S4300807_189GEV;
+
+
+}

Copied: trunk/src/Analyses/MC_JetAnalysis.cc (from r1802, trunk/src/Analyses/MC/MC_JetAnalysis.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_JetAnalysis.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_JetAnalysis.cc)
@@ -0,0 +1,169 @@
+// -*- C++ -*-
+#include "Rivet/Analyses/MC_JetAnalysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+
+  // Base-class constructor: stores the CMS energy, the number of leading
+  // jets to analyse and the name of the jet projection, and pre-sizes the
+  // per-jet histogram vectors (filled in init()).
+  MC_JetAnalysis::MC_JetAnalysis(const std::string& name, const double& sqrts,
+                                 const size_t& njet, const std::string& jetpro_name)
+    : Analysis(name), m_sqrts(sqrts), m_njet(njet), m_jetpro_name(jetpro_name),
+    _h_log10_d(njet, NULL), _h_log10_R(njet+1, NULL), _h_pT_jet(njet, NULL),
+    _h_eta_jet(njet, NULL)
+  {
+    setNeedsCrossSection(true);  // finalize() scales by crossSection()/sumOfWeights()
+  }
+
+
+
+  // Book histograms: per-jet d_{n,n+1} resolutions, integrated rates R_n,
+  // pT and eta spectra, pairwise deta/dR histograms, and jet multiplicities.
+  void MC_JetAnalysis::init() {
+    
+    for (size_t i=0; i<m_njet; ++i) {
+      stringstream dname;
+      dname<<"log10_d_"<<i<<i+1;
+      _h_log10_d[i] = bookHistogram1D(dname.str(), 50, 0.2, 2.6);
+      
+      stringstream Rname;
+      Rname<<"log10_R_"<<i;
+      _h_log10_R[i] = bookDataPointSet(Rname.str(), 50, 0.2, 2.6);
+      
+      stringstream pTname;
+      pTname<<"jet_pT_"<<i+1;
+      // Shrink the pT range and bin count for softer (higher-index) jets
+      double pTmax = 1.0/(double(i)+2.0)*m_sqrts/2.0;
+      int nbins = 100/(i+1);
+      _h_pT_jet[i] = bookHistogram1D(pTname.str(), nbins, 0.0, pTmax);
+      
+      stringstream etaname;
+      etaname<<"jet_eta_"<<i+1;
+      _h_eta_jet[i] = bookHistogram1D(etaname.str(), 50, -5.0, 5.0);
+      
+      // One deta and one dR histogram per (i, j) jet pair
+      for (size_t j=i+1; j<m_njet; ++j) {
+        std::pair<size_t, size_t> ij(std::make_pair(i, j));
+        
+        stringstream detaname;
+        detaname<<"jets_deta_"<<i+1<<j+1;
+        _h_deta_jets.insert(make_pair(ij, bookHistogram1D(detaname.str(), 50, -5.0, 5.0)));
+        
+        stringstream dRname;
+        dRname<<"jets_dR_"<<i+1<<j+1;
+        _h_dR_jets.insert(make_pair(ij, bookHistogram1D(dRname.str(), 25, 0.0, 5.0)));
+      }
+    }
+    // One extra R data point set for the >= m_njet jet rate
+    stringstream Rname;
+    Rname<<"log10_R_"<<m_njet;
+    _h_log10_R[m_njet] = bookDataPointSet(Rname.str(), 50, 0.2, 2.6);
+    
+    _h_jet_multi_exclusive = bookHistogram1D("jet_multi_exclusive", m_njet+3, -0.5, m_njet+3-0.5);
+    _h_jet_multi_inclusive = bookHistogram1D("jet_multi_inclusive", m_njet+3, -0.5, m_njet+3-0.5);
+    _h_jet_multi_ratio = bookDataPointSet("jet_multi_ratio", m_njet+2, 0.5, m_njet+3-0.5);
+  }
+
+
+
+  // Do the analysis: fill jet resolution, rate, kinematic and multiplicity
+  // observables from the configured jet projection.
+  void MC_JetAnalysis::analyze(const Event & e) {
+    double weight = e.weight();
+    
+    const FastJets& jetpro = applyProjection<FastJets>(e, m_jetpro_name);
+
+    // jet resolutions and integrated jet rates
+    const fastjet::ClusterSequence* seq = jetpro.clusterSeq();
+    if (seq!=NULL) {
+      // Start above the histogrammed log10(d) range so the first rate
+      // comparison always sees a "previous" scale above any cut
+      double previous_dij = 10.0;
+      for (size_t i=0; i<m_njet; ++i) {
+        // jet resolution i -> j
+        double d_ij=log10(sqrt(seq->exclusive_dmerge_max(i)));
+        
+        // fill differential jet resolution
+        _h_log10_d[i]->fill(d_ij, weight);
+        
+        // fill integrated jet resolution: a point is incremented when its
+        // d-cut falls between two successive merge scales
+        for (int ibin=0; ibin<_h_log10_R[i]->size(); ++ibin) {
+          IDataPoint* dp=_h_log10_R[i]->point(ibin);
+          double dcut=dp->coordinate(0)->value();
+          if (d_ij<dcut && previous_dij>dcut) {
+            dp->coordinate(1)->setValue(dp->coordinate(1)->value()+weight);
+          }
+        }
+        previous_dij = d_ij;
+      }
+      // one remaining integrated jet resolution (>= m_njet jets)
+      for (int ibin=0; ibin<_h_log10_R[m_njet]->size(); ++ibin) {
+        IDataPoint* dp=_h_log10_R[m_njet]->point(ibin);
+        double dcut=dp->coordinate(0)->value();
+        if (previous_dij>dcut) {
+          dp->coordinate(1)->setValue(dp->coordinate(1)->value()+weight);
+        }
+      }
+    }
+
+    // Jets above 20 GeV, hardest first
+    const Jets& jets = jetpro.jetsByPt(20.0);
+    
+    // the remaining direct jet observables
+    for (size_t i=0; i<m_njet; ++i) {
+      if (jets.size()<i+1) continue;
+      _h_pT_jet[i]->fill(jets[i].momentum().pT(), weight);
+      _h_eta_jet[i]->fill(jets[i].momentum().eta(), weight);
+      
+      // pairwise separations of the leading jets
+      for (size_t j=i+1; j<m_njet; ++j) {
+        if (jets.size()<j+1) continue;
+        std::pair<size_t, size_t> ij(std::make_pair(i, j));
+        double deta = jets[i].momentum().eta()-jets[j].momentum().eta();
+        double dR = deltaR(jets[i].momentum(), jets[j].momentum());
+        _h_deta_jets[ij]->fill(deta, weight);
+        _h_dR_jets[ij]->fill(dR, weight);
+      }
+    }
+    _h_jet_multi_exclusive->fill(jets.size(), weight);
+
+    // inclusive multiplicity: every bin up to the observed multiplicity
+    for (size_t i=0; i<m_njet+2; ++i) {
+      if (jets.size()>=i) {
+        _h_jet_multi_inclusive->fill(i, weight);
+      }
+    }
+  }
+
+
+  // Finalize: normalise all histograms/rates to the generator cross-section
+  // and build the inclusive jet multiplicity ratio.
+  void MC_JetAnalysis::finalize() {
+    for (size_t i=0; i<m_njet; ++i) {
+      scale(_h_log10_d[i], crossSection()/sumOfWeights());
+      for (int ibin=0; ibin<_h_log10_R[i]->size(); ++ibin) {
+        IDataPoint* dp=_h_log10_R[i]->point(ibin);
+        dp->coordinate(1)->setValue(dp->coordinate(1)->value()*crossSection()/sumOfWeights());
+      }
+      
+      scale(_h_pT_jet[i], crossSection()/sumOfWeights());
+      scale(_h_eta_jet[i], crossSection()/sumOfWeights());
+      
+      // Scale the pairwise jet-jet histograms too (this loop was left
+      // empty before, so the deta/dR histograms were never normalised,
+      // unlike every other histogram booked in init()).
+      for (size_t j=i+1; j<m_njet; ++j) {
+        std::pair<size_t, size_t> ij(std::make_pair(i, j));
+        scale(_h_deta_jets[ij], crossSection()/sumOfWeights());
+        scale(_h_dR_jets[ij], crossSection()/sumOfWeights());
+      }
+    }
+    for (int ibin=0; ibin<_h_log10_R[m_njet]->size(); ++ibin) {
+      IDataPoint* dp=_h_log10_R[m_njet]->point(ibin);
+      dp->coordinate(1)->setValue(dp->coordinate(1)->value()*crossSection()/sumOfWeights());
+    }
+
+    // fill inclusive jet multi ratio: bin i holds N(>=i+1 jets)/N(>=i jets),
+    // with a crude error propagation from the two bin errors
+    int Nbins=_h_jet_multi_inclusive->axis().bins();
+    std::vector<double> ratio(Nbins-1, 0.0);
+    std::vector<double> err(Nbins-1, 0.0);
+    for (int i=0; i<Nbins-1; ++i) {
+      if (_h_jet_multi_inclusive->binHeight(i)>0.0 && _h_jet_multi_inclusive->binHeight(i+1)>0.0) {
+        ratio[i]=_h_jet_multi_inclusive->binHeight(i+1)/_h_jet_multi_inclusive->binHeight(i);
+        double relerr_i=_h_jet_multi_inclusive->binError(i)/_h_jet_multi_inclusive->binHeight(i);
+        double relerr_j=_h_jet_multi_inclusive->binError(i+1)/_h_jet_multi_inclusive->binHeight(i+1);
+        err[i]=ratio[i]*(relerr_i+relerr_j);
+      }
+    }
+    _h_jet_multi_ratio->setCoordinate(1, ratio, err);
+
+    scale(_h_jet_multi_exclusive, crossSection()/sumOfWeights());
+    scale(_h_jet_multi_inclusive, crossSection()/sumOfWeights());
+  }
+
+
+}

Copied: trunk/src/Analyses/MC_LHC_DIJET.cc (from r1802, trunk/src/Analyses/MC/MC_LHC_DIJET.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_LHC_DIJET.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_LHC_DIJET.cc)
@@ -0,0 +1,119 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /// @brief Generic dijet observables for LHC MC validation
+  class MC_LHC_DIJET : public Analysis {
+  public:
+
+    /// Default constructor: books full and charged final states and the
+    /// kT jet projections built from them.
+    MC_LHC_DIJET() 
+      : Analysis("MC_LHC_DIJET") {
+      FinalState fs;
+      ChargedFinalState cfs;
+      addProjection(fs, "FS");
+      addProjection(cfs, "CFS");
+      addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets");
+      addProjection(FastJets(cfs, FastJets::KT, 0.7), "ChargedJets");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    void init() { 
+      _hist_jetcount = bookHistogram1D("d01-x01-y01", 5, 0., 10.);
+      _hist_jetpt = bookHistogram1D("d02-x01-y01", 30, 30.,100.);
+      _hist_jetptlog = bookHistogram1D("d03-x01-y01", 20, 0.,8.);
+      _hist_leadingjetpt = bookHistogram1D("d04-x01-y01", 25, 30.,100.);
+      _hist_secondleadingjetpt = bookHistogram1D("d05-x01-y01", 25, 30.,100.);
+      _hist_jetphi = bookHistogram1D("d06-x01-y01",24, 0., 6.4);
+      _hist_jeteta = bookHistogram1D("d07-x01-y01", 30, -6., 6.);
+      _hist_jetdphi = bookHistogram1D("d08-x01-y01", 24, 0., 6.4);
+      _hist_jetdeta = bookHistogram1D("d09-x01-y01", 24, 0., 6.);
+      _hist_chargemultiplicity = bookHistogram1D("d10-x01-y01",30, 0.5, 250.5);
+      _hist_chargemeanpt = bookHistogram1D("d11-x01-y01", 25, 0., 10.);    
+      _hist_chargept = bookHistogram1D("d12-x01-y01", 32, 0., 25.);
+      _hist_chargelogpt = bookHistogram1D("d13-x01-y01", 32, 0., 6.);    
+      _hist_chargermspt = bookHistogram1D("d14-x01-y01", 32, 0., 10.);
+    }
+    
+    
+    void analyze(const Event& event) {
+      const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
+      const Jets jets = fastjets.jetsByPt(20.);
+      const double weight = event.weight();
+      
+      // Require exactly two jets above threshold
+      // (equivalent to the original "size() < 2 || size() >= 3" condition)
+      if (jets.size() != 2) vetoEvent;
+      const double angle = fabs(jets[1].momentum().azimuthalAngle() - jets[0].momentum().azimuthalAngle());
+      const double prapidity = fabs(jets[1].momentum().pseudorapidity() - jets[0].momentum().pseudorapidity());
+      _hist_jetcount->fill(jets.size(), weight);
+      _hist_leadingjetpt->fill(jets[0].momentum().pT(), weight);
+      _hist_secondleadingjetpt->fill(jets[1].momentum().pT(), weight);
+      _hist_jetdphi->fill(angle , weight);
+      _hist_jetdeta->fill(prapidity, weight);
+      
+      // Per-jet spectra (const ref: avoid copying each Jet)
+      foreach (const Jet& j, fastjets.jetsByPt(20*GeV)) {
+        _hist_jetpt->fill(j.momentum().pT(), weight);
+        _hist_jetptlog->fill(log(j.momentum().pT()), weight);
+        _hist_jetphi->fill(j.momentum().azimuthalAngle(), weight);
+        _hist_jeteta->fill(j.momentum().pseudorapidity(), weight);
+      }
+      
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFS");
+      // const FastJets& cfastjets = applyProjection<FastJets>(event, "ChargedJets");
+      double meanpt(0), rmspt(0);
+      /// @todo Add jets
+      // foreach(Jet cj, cfastjets.jetsByPt(20.)){
+      _hist_chargemultiplicity->fill(cfs.particles().size(), weight);
+      foreach (const Particle& cp, cfs.particles()) {
+        meanpt += cp.momentum().pT();
+        rmspt  += cp.momentum().pT()*cp.momentum().pT();
+        _hist_chargept->fill(cp.momentum().pT(), weight);
+        _hist_chargelogpt->fill(log(cp.momentum().pT()), weight);
+      }
+      // Guard against division by zero for events with no charged particles
+      if (!cfs.particles().empty()) {
+        meanpt = meanpt / cfs.particles().size();
+        _hist_chargemeanpt->fill(meanpt, weight);
+        rmspt = sqrt(rmspt / cfs.particles().size());
+        _hist_chargermspt->fill(rmspt, weight);
+      }
+      // }
+    }
+    
+    
+    void finalize() {
+      /// @todo Normalise!
+    }
+    
+    //@}
+
+
+  private:
+    
+    AIDA::IHistogram1D* _hist_jetcount;
+    AIDA::IHistogram1D* _hist_jetpt;
+    AIDA::IHistogram1D* _hist_jetptlog;
+    AIDA::IHistogram1D* _hist_leadingjetpt;
+    AIDA::IHistogram1D* _hist_secondleadingjetpt;
+    AIDA::IHistogram1D* _hist_jetphi;
+    AIDA::IHistogram1D* _hist_jetdphi;
+    AIDA::IHistogram1D* _hist_jeteta;
+    AIDA::IHistogram1D* _hist_jetdeta;
+    AIDA::IHistogram1D* _hist_chargemultiplicity;
+    AIDA::IHistogram1D* _hist_chargemeanpt;
+    AIDA::IHistogram1D* _hist_chargept;
+    AIDA::IHistogram1D* _hist_chargelogpt;
+    AIDA::IHistogram1D* _hist_chargermspt;
+    
+  };
+  
+
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<MC_LHC_DIJET> plugin_MC_LHC_DIJET;
+  
+}

Copied: trunk/src/Analyses/MC_LHC_LEADINGJETS.cc (from r1802, trunk/src/Analyses/MC/MC_LHC_LEADINGJETS.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_LHC_LEADINGJETS.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_LHC_LEADINGJETS.cc)
@@ -0,0 +1,175 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /* Underlying event in leading jet, extended to the LHC
+   * @author Andy Buckley
+   */ 
+  class MC_LHC_LEADINGJETS : public Analysis {
+  public:
+    
+    /// Constructor
+    MC_LHC_LEADINGJETS()
+      : Analysis("MC_LHC_LEADINGJETS")
+    { 
+      setBeams(PROTON, PROTON);
+      
+      // Final state for the jet finding
+      const FinalState fsj(-4.0, 4.0, 0.0*GeV);
+      addProjection(fsj, "FSJ");
+      addProjection(FastJets(fsj, FastJets::KT, 0.7), "Jets");
+      
+      // Charged final state for the distributions
+      const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
+      addProjection(cfs, "CFS");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+    
+    // Book histograms
+    void init() {
+      const double maxpt1 = 500.0/GeV;
+      _hist_pnchg      = bookProfile1D("trans-nchg", 50, 0.0, maxpt1);
+      _hist_pmaxnchg   = bookProfile1D("trans-maxnchg", 50, 0.0, maxpt1);
+      _hist_pminnchg   = bookProfile1D("trans-minnchg", 50, 0.0, maxpt1);
+      _hist_pcptsum    = bookProfile1D("trans-ptsum", 50, 0.0, maxpt1);
+      _hist_pmaxcptsum = bookProfile1D("trans-maxptsum", 50, 0.0, maxpt1);
+      _hist_pmincptsum = bookProfile1D("trans-minptsum", 50, 0.0, maxpt1);
+      _hist_pcptave    = bookProfile1D("trans-ptavg", 50, 0.0, maxpt1);
+    }
+
+
+    // Do the analysis: classify charged activity into toward/transverse/away
+    // regions relative to the leading jet and fill the UE profiles.
+    void analyze(const Event& e) {
+
+      const FinalState& fsj = applyProjection<FinalState>(e, "FSJ");
+      if (fsj.particles().empty()) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+
+      const FastJets& jetpro = applyProjection<FastJets>(e, "Jets");
+      const Jets jets = jetpro.jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
+
+      // We require the leading jet to be within |eta| < 2
+      if (jets.size() < 1 || fabs(jets[0].momentum().pseudorapidity()) > 2) {
+        getLog() << Log::DEBUG << "Failed jet cut" << endl;
+        vetoEvent;
+      }
+
+      const double jetphi = jets[0].momentum().phi();
+      const double jetpT  = jets[0].momentum().pT();
+      getLog() << Log::DEBUG << "Leading jet: pT = " << jetpT
+               << ", eta = " << jets[0].momentum().pseudorapidity()
+               << ", phi = " << jetphi << endl;
+
+      // Get the event weight
+      const double weight = e.weight();
+
+      // Get the final states to work with for filling the distributions
+      const FinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
+
+      size_t   numOverall(0),     numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0);
+      double ptSumOverall(0.0), ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
+      double ptMaxOverall(0.0), ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
+
+      // Calculate all the charged stuff
+      foreach (const Particle& p, cfs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double pT = p.momentum().pT();
+        const double phi = p.momentum().azimuthalAngle();
+        // BUGFIX: map the jet-relative azimuth into [0, 2*PI) -- phi - jetphi
+        // can be negative, which broke the "rotatedphi <= PI" wing split below.
+        double rotatedphi = phi - jetphi;
+        while (rotatedphi < 0) rotatedphi += 2*PI;
+
+        ptSumOverall += pT;
+        ++numOverall;
+        if (pT > ptMaxOverall) ptMaxOverall = pT;
+
+        if (dPhi < PI/3.0) {
+          ptSumToward += pT;
+          ++numToward;
+          if (pT > ptMaxToward) ptMaxToward = pT;
+        }
+        else if (dPhi < 2*PI/3.0) {
+          // Transverse region: split into the two wings by rotated azimuth.
+          if (rotatedphi <= PI) {
+            ptSumTrans1 += pT;
+            ++numTrans1;
+            if (pT > ptMaxTrans1) ptMaxTrans1 = pT;
+          } else {
+            // BUGFIX: this branch was previously nested inside the
+            // "pT > ptMaxTrans1" test, so the second transverse wing was
+            // filled with soft wing-1 particles and the rotatedphi > PI
+            // hemisphere was never counted at all.
+            ptSumTrans2 += pT;
+            ++numTrans2;
+            if (pT > ptMaxTrans2) ptMaxTrans2 = pT;
+          }
+        }
+        else {
+          ptSumAway += pT;
+          ++numAway;
+          if (pT > ptMaxAway) ptMaxAway = pT;
+        }
+      }
+      
+      
+      // Fill the histograms, normalised by the region's (eta, phi) area
+      //_hist_tnchg->fill(jetpT, numToward/(4*PI/3), weight);
+      _hist_pnchg->fill(jetpT, (numTrans1+numTrans2)/(4*PI/3), weight);
+      _hist_pmaxnchg->fill(jetpT, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pminnchg->fill(jetpT, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      //_hist_pdifnchg->fill(jetpT, abs(numTrans1-numTrans2)/(2*PI/3), weight);
+      //_hist_anchg->fill(jetpT, numAway/(4*PI/3), weight);
+      
+      //_hist_tcptsum->fill(jetpT, ptSumToward/(4*PI/3), weight);
+      _hist_pcptsum->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
+      _hist_pmaxcptsum->fill(jetpT, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pmincptsum->fill(jetpT, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      //_hist_pdifcptsum->fill(jetpT, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
+      //_hist_acptsum->fill(jetpT, ptSumAway/(4*PI/3), weight);
+      
+      //if (numToward > 0) {
+      //  _hist_tcptave->fill(jetpT, ptSumToward/numToward, weight);
+      //  _hist_tcptmax->fill(jetpT, ptMaxToward, weight);
+      //}
+      // Mean-pT profile is only defined when there are transverse particles
+      if ((numTrans1+numTrans2) > 0) {
+        _hist_pcptave->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
+        //_hist_pcptmax->fill(jetpT, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
+      }
+      //if (numAway > 0) {
+      //  _hist_acptave->fill(jetpT, ptSumAway/numAway, weight);
+      //  _hist_acptmax->fill(jetpT, ptMaxAway, weight);
+      //}
+    }
+    
+    
+    void finalize() {  
+      //
+    }
+    
+    
+  private:
+    
+    AIDA::IProfile1D *_hist_pnchg;
+    AIDA::IProfile1D *_hist_pmaxnchg;
+    AIDA::IProfile1D *_hist_pminnchg;
+    AIDA::IProfile1D *_hist_pcptsum;
+    AIDA::IProfile1D *_hist_pmaxcptsum;
+    AIDA::IProfile1D *_hist_pmincptsum;
+    AIDA::IProfile1D *_hist_pcptave;  
+    
+  };
+  
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<MC_LHC_LEADINGJETS> plugin_MC_LHC_LEADINGJETS;
+  
+}

Copied: trunk/src/Analyses/MC_LHC_WANALYSIS.cc (from r1802, trunk/src/Analyses/MC/MC_LHC_WANALYSIS.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_LHC_WANALYSIS.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_LHC_WANALYSIS.cc)
@@ -0,0 +1,130 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/WFinder.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  class MC_LHC_WANALYSIS : public Analysis {
+  public:
+  
+    /// Default constructor
+    MC_LHC_WANALYSIS()
+      : Analysis("MC_LHC_WANALYSIS") 
+    {
+      const ChargedFinalState cfs;
+      addProjection(cfs, "CFS");
+      /// @todo Handle muon-decay Ws as well
+      const WFinder wf(-MAXRAPIDITY, MAXRAPIDITY, 0.0*GeV, ELECTRON, 30.0*GeV, 110.0*GeV, 0.2);
+      addProjection(wf, "WF");
+      FastJets fastjets(wf.remainingFinalState(), FastJets::KT, 0.7);
+      addProjection(fastjets, "Jets");
+    }
+    
+
+    /// @name Analysis methods
+    /// @todo change "Weights" to differential cross sections once histos normalised to cross-section.
+    //@{
+
+    void init() { 
+      _hist_chargemulti = bookHistogram1D("d01-x01-y01", 30, 0.5, 250.5);
+      _hist_chargept = bookHistogram1D("d02-x01-y01", 32, 0., 25.);
+      _hist_chargemeanpt = bookHistogram1D("d03-x01-y01", 25, 0., 10.);
+      _hist_chargermspt = bookHistogram1D("d04-x01-y01", 32, 0., 10.);
+      _hist_wcount = bookHistogram1D("d05-x01-y01", 30, 0., 15.);
+      _hist_wpt = bookHistogram1D("d06-x01-y01", 32, 0., 25.);
+      _hist_wlogpt = bookHistogram1D("d07-x01-y01", 32, 0., 6.);
+      _hist_weta = bookHistogram1D("d08-x01-y01", 32, -6., 6.);
+      _hist_wphi = bookHistogram1D("d09-x01-y01", 32, 0., 6.4);
+      _hist_wmass = bookHistogram1D("d10-x01-y01", 40, 60., 100.);
+      _hist_wlogmass = bookHistogram1D("d11-x01-y01", 32, 0., 10.);
+      _hist_jetcount = bookHistogram1D("d12-x01-y01", 32, 0, 100);
+      _hist_jetpt = bookHistogram1D("d13-x01-y01", 32, 20., 100.);
+      _hist_jetlogpt = bookHistogram1D("d14-x01-y01", 32, 0., 20.);
+    }
+    
+    
+    // Fill charged-particle, W-boson and jet observables for one event.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      const FinalState& cfs = applyProjection<FinalState>(event, "CFS");
+      const WFinder& wf = applyProjection<WFinder>(event, "WF");
+      const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
+      const Jets jets = fastjets.jetsByPt();
+    
+      // Charged particles part
+      const size_t nch = cfs.particles().size();
+      _hist_chargemulti->fill(nch, weight);
+      double meanpt(0), rmspt(0);
+      foreach (const Particle& p, cfs.particles()) {
+        const double pT = p.momentum().pT();
+        _hist_chargept->fill(pT/GeV, weight);
+        meanpt += pT;
+        rmspt += pT*pT;
+      }
+      // BUGFIX: guard against division by zero (NaN histogram fill) in
+      // events with no charged particles in the final state.
+      if (nch > 0) {
+        meanpt = meanpt / nch;
+        _hist_chargemeanpt->fill(meanpt/GeV, weight);
+        rmspt = sqrt(rmspt / nch);
+        _hist_chargermspt->fill(rmspt/GeV, weight);
+      }
+      
+      // W part
+      _hist_wcount->fill(wf.particles().size(), weight);
+      foreach (const Particle& wp, wf.particles()) {
+        const double pT = wp.momentum().pT();
+        _hist_wpt->fill(pT/GeV, weight);
+        _hist_wlogpt->fill(log(pT/GeV), weight);
+        _hist_weta->fill(wp.momentum().pseudorapidity(), weight);
+        _hist_wphi->fill(wp.momentum().azimuthalAngle(), weight);
+        const double m = wp.momentum().mass();
+        _hist_wmass->fill(m/GeV, weight);
+        _hist_wlogmass->fill(log(m/GeV), weight);	
+      }
+      
+      // Jet part: reuse the jet list retrieved above instead of
+      // calling jetsByPt() a second time.
+      _hist_jetcount->fill(jets.size(), weight);
+      foreach (const Jet& j, jets) {
+        const double pT = j.momentum().pT();
+        _hist_jetpt->fill(pT/GeV, weight);
+        _hist_jetlogpt->fill(log(pT/GeV), weight);
+      }
+    }
+    
+    
+    void finalize() {
+      ///@todo Obtain cross-sections from generator and normalise histos to them.
+    }
+    
+    //@}
+    
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _hist_chargemulti;
+    AIDA::IHistogram1D* _hist_chargept;
+    AIDA::IHistogram1D* _hist_chargemeanpt;
+    AIDA::IHistogram1D* _hist_chargermspt;
+    AIDA::IHistogram1D* _hist_wcount;
+    AIDA::IHistogram1D* _hist_wpt;
+    AIDA::IHistogram1D* _hist_wlogpt;
+    //AIDA::IHistogram1D* _hist_zpthigh;
+    //AIDA::IHistogram1D* _hist_zlogpthigh;
+    AIDA::IHistogram1D* _hist_weta;
+    AIDA::IHistogram1D* _hist_wphi;
+    AIDA::IHistogram1D* _hist_wmass;
+    AIDA::IHistogram1D* _hist_wlogmass;
+    AIDA::IHistogram1D* _hist_jetcount;
+    AIDA::IHistogram1D* _hist_jetpt;
+    AIDA::IHistogram1D* _hist_jetlogpt;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<MC_LHC_WANALYSIS> plugin_MC_LHC_WANALYSIS;
+
+}

Copied: trunk/src/Analyses/MC_LHC_ZANALYSIS.cc (from r1802, trunk/src/Analyses/MC/MC_LHC_ZANALYSIS.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_LHC_ZANALYSIS.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_LHC_ZANALYSIS.cc)
@@ -0,0 +1,131 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+#include "Rivet/Projections/ZFinder.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  class MC_LHC_ZANALYSIS : public Analysis {
+  public:
+  
+    /// Default constructor
+    MC_LHC_ZANALYSIS()
+      : Analysis("MC_LHC_ZANALYSIS") 
+    {
+      const ChargedFinalState cfs;
+      addProjection(cfs, "CFS");
+      /// @todo Handle muon-decay Zs as well
+      const ZFinder zf(-MAXRAPIDITY, MAXRAPIDITY, 0.0*GeV, ELECTRON, 30.0*GeV, 115.0*GeV, 0.2);
+      addProjection(zf, "ZF");
+      FastJets fastjets(zf.remainingFinalState(), FastJets::KT, 0.7);
+      addProjection(fastjets, "Jets");
+    }
+    
+
+    /// @name Analysis methods
+    /// @todo change "Weights" to differential cross sections once histos normalised to cross-section.
+    //@{
+
+    void init() { 
+      _hist_chargemulti = bookHistogram1D("d01-x01-y01", 30, 0.5, 250.5);
+      _hist_chargept = bookHistogram1D("d02-x01-y01", 32, 0., 25.);
+      _hist_chargemeanpt = bookHistogram1D("d03-x01-y01", 25, 0., 10.);
+      _hist_chargermspt = bookHistogram1D("d04-x01-y01", 32, 0., 10.);
+      _hist_zcount = bookHistogram1D("d05-x01-y01", 30, 0., 15.);
+      _hist_zpt = bookHistogram1D("d06-x01-y01", 32, 0., 25.);
+      _hist_zlogpt = bookHistogram1D("d07-x01-y01", 32, 0., 6.);
+      _hist_zeta = bookHistogram1D("d08-x01-y01", 32, -6., 6.);
+      _hist_zphi = bookHistogram1D("d09-x01-y01", 32, 0., 6.4);
+      _hist_zmass = bookHistogram1D("d10-x01-y01", 40, 60., 100.);
+      _hist_zlogmass = bookHistogram1D("d11-x01-y01", 32, 0., 10.);
+      _hist_jetcount = bookHistogram1D("d12-x01-y01", 32, 0, 100);
+      _hist_jetpt = bookHistogram1D("d13-x01-y01", 32, 20., 100.);
+      _hist_jetlogpt = bookHistogram1D("d14-x01-y01", 32, 0., 20.);
+    }
+    
+    
+    // Fill charged-particle, Z-boson and jet observables for one event.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      const FinalState& cfs = applyProjection<FinalState>(event, "CFS");
+      const ZFinder& zf = applyProjection<ZFinder>(event, "ZF");
+      const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
+      const Jets jets = fastjets.jetsByPt();
+    
+      // Charged particles part    
+      const size_t nch = cfs.particles().size();
+      _hist_chargemulti->fill(nch, weight);
+      double meanpt(0), rmspt(0);
+      foreach (const Particle& p, cfs.particles()) {
+        const double pT = p.momentum().pT();
+        _hist_chargept->fill(pT/GeV, weight);
+        meanpt += pT;
+        rmspt += pT*pT;
+      }
+      // BUGFIX: guard against division by zero (NaN histogram fill) in
+      // events with no charged particles in the final state.
+      if (nch > 0) {
+        meanpt = meanpt / nch;
+        _hist_chargemeanpt->fill(meanpt/GeV, weight);
+        rmspt = sqrt(rmspt / nch);
+        _hist_chargermspt->fill(rmspt/GeV, weight);
+      }
+      
+      // Z part
+      _hist_zcount->fill(zf.particles().size(), weight);
+      foreach (const Particle& zp, zf.particles()) {
+        const double pT = zp.momentum().pT();
+        _hist_zpt->fill(pT/GeV, weight);
+        _hist_zlogpt->fill(log(pT/GeV), weight);
+        _hist_zeta->fill(zp.momentum().pseudorapidity(), weight);
+        _hist_zphi->fill(zp.momentum().azimuthalAngle(), weight);
+        const double m = zp.momentum().mass();
+        _hist_zmass->fill(m/GeV, weight);
+        _hist_zlogmass->fill(log(m/GeV), weight);	
+      }
+      
+      // Jet part: reuse the jet list retrieved above instead of
+      // calling jetsByPt() a second time.
+      _hist_jetcount->fill(jets.size(), weight);
+      foreach (const Jet& j, jets) {
+        const double pT = j.momentum().pT();
+        _hist_jetpt->fill(pT/GeV, weight);
+        _hist_jetlogpt->fill(log(pT/GeV), weight);
+      }
+    }
+    
+    
+    void finalize() {
+      ///@todo Obtain cross-sections from generator and normalise histos to them.
+    }
+    
+    //@}
+    
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _hist_chargemulti;
+    AIDA::IHistogram1D* _hist_chargept;
+    AIDA::IHistogram1D* _hist_chargemeanpt;
+    AIDA::IHistogram1D* _hist_chargermspt;
+    AIDA::IHistogram1D* _hist_zcount;
+    AIDA::IHistogram1D* _hist_zpt;
+    AIDA::IHistogram1D* _hist_zlogpt;
+    //AIDA::IHistogram1D* _hist_zpthigh;
+    //AIDA::IHistogram1D* _hist_zlogpthigh;
+    AIDA::IHistogram1D* _hist_zeta;
+    AIDA::IHistogram1D* _hist_zphi;
+    AIDA::IHistogram1D* _hist_zmass;
+    AIDA::IHistogram1D* _hist_zlogmass;
+    AIDA::IHistogram1D* _hist_jetcount;
+    AIDA::IHistogram1D* _hist_jetpt;
+    AIDA::IHistogram1D* _hist_jetlogpt;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<MC_LHC_ZANALYSIS> plugin_MC_LHC_ZANALYSIS;
+
+}

Copied: trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc (from r1802, trunk/src/Analyses/MC/MC_TVT1960_PHOTONJETS.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_TVT1960_PHOTONJETS.cc)
@@ -0,0 +1,126 @@
+// -*- C++ -*-
+#include "Rivet/Analyses/MC_JetAnalysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/LeadingParticlesFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+  class MC_TVT1960_PHOTONJETS : public MC_JetAnalysis {
+  public:
+    
+    /// Default constructor: book the final-state and jet projections.
+    MC_TVT1960_PHOTONJETS()
+      : MC_JetAnalysis("MC_TVT1960_PHOTONJETS", 1960.0, 4, "Jets")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      // General FS
+      FinalState fs(-5.0, 5.0);
+      addProjection(fs, "FS");
+      
+      // Get leading photon
+      LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
+      photonfs.addParticleId(PHOTON);
+      addProjection(photonfs, "LeadingPhoton");
+      
+      // FS for jets excludes the leading photon
+      // NOTE(review): VetoedFinalState is used here but its header is not
+      // among this file's #includes -- presumably pulled in transitively;
+      // confirm and include Rivet/Projections/VetoedFinalState.hh explicitly.
+      VetoedFinalState vfs(fs);
+      vfs.addVetoOnThisFinalState(photonfs);
+      addProjection(vfs, "JetFS");
+      FastJets jetpro(vfs, FastJets::KT, 0.7, 20.0*GeV);
+      addProjection(jetpro, "Jets");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{ 
+    
+    /// Book histograms
+    void init() {
+      _h_photon_pT = bookHistogram1D("photon_pT", 50, 0.0, 500.0);
+      _h_photon_y = bookHistogram1D("photon_y", 20, -1.0, 1.0);
+      _h_photon_jet1_deta = bookHistogram1D("photon_jet1_deta", 50, -5.0, 5.0);
+      _h_photon_jet1_dR = bookHistogram1D("photon_jet1_dR", 25, 0.5, 7.0);
+
+      MC_JetAnalysis::init();
+    }
+    
+
+    /// Do the analysis: select and isolate the leading photon, then fill
+    /// the photon and photon-jet correlation histograms.
+    void analyze(const Event& e) {
+      const double weight = e.weight();
+    
+      // Get the photon: require exactly one leading-photon candidate
+      const ParticleVector photons = applyProjection<FinalState>(e, "LeadingPhoton").particles();
+      if (photons.size() != 1) {
+        vetoEvent;
+      }
+      const FourMomentum photon = photons.front().momentum();
+      
+      // Get the particles used for jet finding (leading photon excluded)
+      const FinalState& fs = applyProjection<FinalState>(e, "JetFS");
+      if (fs.empty()) {
+        vetoEvent;
+      }
+      
+      // Isolate photon by ensuring that a 0.4 cone around it contains less than 7% of the photon's energy
+      const double egamma = photon.E();
+      double econe = 0.0;
+      foreach (const Particle& p, fs.particles()) {
+        if (deltaR(photon, p.momentum()) < 0.4) {
+          econe += p.momentum().E();
+          // Veto as soon as E_cone gets larger
+          if (econe/egamma > 0.07) {
+            vetoEvent;
+          }
+        }
+      }
+      
+      _h_photon_pT->fill(photon.pT(),weight);
+      _h_photon_y->fill(photon.rapidity(),weight);
+      
+      // Photon-jet correlations for the hardest jet above 20 GeV
+      const FastJets& jetpro = applyProjection<FastJets>(e, "Jets");
+      const Jets& jets = jetpro.jetsByPt(20.0*GeV);
+      if (jets.size()>0) {
+        _h_photon_jet1_deta->fill(photon.eta()-jets[0].momentum().eta(), weight);
+        _h_photon_jet1_dR->fill(deltaR(photon, jets[0].momentum()), weight);
+      }
+      
+      // Fill the generic jet observables provided by the base class
+      MC_JetAnalysis::analyze(e);
+    }
+    
+    
+    // Finalize
+    void finalize() {
+      scale(_h_photon_pT, crossSection()/sumOfWeights());
+      scale(_h_photon_y, crossSection()/sumOfWeights());
+      scale(_h_photon_jet1_deta, crossSection()/sumOfWeights());
+      scale(_h_photon_jet1_dR, crossSection()/sumOfWeights());
+      
+      MC_JetAnalysis::finalize();
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_photon_pT;
+    AIDA::IHistogram1D * _h_photon_y;
+    AIDA::IHistogram1D * _h_photon_jet1_deta;
+    AIDA::IHistogram1D * _h_photon_jet1_dR;
+    //@}
+
+  };
+    
+    
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<MC_TVT1960_PHOTONJETS> plugin_MC_TVT1960_PHOTONJETS;
+
+}

Copied: trunk/src/Analyses/MC_TVT1960_ZJETS.cc (from r1802, trunk/src/Analyses/MC/MC_TVT1960_ZJETS.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/MC_TVT1960_ZJETS.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/MC/MC_TVT1960_ZJETS.cc)
@@ -0,0 +1,113 @@
+// -*- C++ -*-
+#include "Rivet/Analyses/MC_JetAnalysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ZFinder.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+  /// @brief Monte Carlo validation observables for Z[e+ e-] + jets production at Tevatron Run II
+  /// @author Frank Siegert
+  class MC_TVT1960_ZJETS : public MC_JetAnalysis {
+
+  public:
+
+    /// Default constructor
+    MC_TVT1960_ZJETS()
+      : MC_JetAnalysis("MC_TVT1960_ZJETS", 1960.0, 4, "Jets")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      setNeedsCrossSection(true);
+      
+      ZFinder zfinder(-3.5, 3.5, 25.0*GeV, ELECTRON, 65.0*GeV, 115.0*GeV, 0.2);
+      addProjection(zfinder, "ZFinder");
+      FastJets jetpro(zfinder.remainingFinalState(), FastJets::KT, 0.7, 20.0*GeV);
+      addProjection(jetpro, "Jets");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{ 
+
+    /// Book histograms
+    void init() {
+      _h_Z_mass = bookHistogram1D("Z_mass", 50, 66.0, 116.0);
+      _h_Z_pT = bookHistogram1D("Z_pT", 100, 0.0, 500.0);
+      _h_Z_y = bookHistogram1D("Z_y", 40, -4.0, 4.0);
+      _h_Z_jet1_deta = bookHistogram1D("Z_jet1_deta", 50, -5.0, 5.0);
+      _h_Z_jet1_dR = bookHistogram1D("Z_jet1_dR", 25, 0.5, 7.0);
+      _h_lepton_pT = bookHistogram1D("lepton_pT", 100, 0.0, 500.0);
+      _h_lepton_eta = bookHistogram1D("lepton_eta", 40, -4.0, 4.0);
+      
+      MC_JetAnalysis::init();
+    }
+    
+
+    
+    /// Do the analysis 
+    void analyze(const Event & e) {
+      const double weight = e.weight();
+      
+      const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
+      if (zfinder.particles().size()!=1) {
+        vetoEvent;
+      }
+      
+      FourMomentum zmom(zfinder.particles()[0].momentum());
+      _h_Z_mass->fill(zmom.mass(),weight);
+      _h_Z_pT->fill(zmom.pT(),weight);
+      _h_Z_y->fill(zmom.rapidity(),weight);
+      foreach (const Particle& l, zfinder.constituentsFinalState().particles()) {
+        _h_lepton_pT->fill(l.momentum().pT(), weight);
+        _h_lepton_eta->fill(l.momentum().eta(), weight);
+      }
+      
+      const FastJets& jetpro = applyProjection<FastJets>(e, "Jets");
+      const Jets& jets = jetpro.jetsByPt(20.0*GeV);
+      if (jets.size() > 0) {
+        _h_Z_jet1_deta->fill(zmom.eta()-jets[0].momentum().eta(), weight);
+        _h_Z_jet1_dR->fill(deltaR(zmom, jets[0].momentum()), weight);
+      }
+      
+      MC_JetAnalysis::analyze(e);
+    }
+    
+
+    /// Finalize
+    void finalize() {
+      scale(_h_Z_mass, crossSection()/sumOfWeights());
+      scale(_h_Z_pT, crossSection()/sumOfWeights());
+      scale(_h_Z_y, crossSection()/sumOfWeights());
+      scale(_h_Z_jet1_deta, crossSection()/sumOfWeights());
+      scale(_h_Z_jet1_dR, crossSection()/sumOfWeights());
+      scale(_h_lepton_pT, crossSection()/sumOfWeights());
+      scale(_h_lepton_eta, crossSection()/sumOfWeights());
+      
+      MC_JetAnalysis::finalize();
+    }
+    
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_Z_mass;
+    AIDA::IHistogram1D * _h_Z_pT;
+    AIDA::IHistogram1D * _h_Z_y;
+    AIDA::IHistogram1D * _h_Z_jet1_deta;
+    AIDA::IHistogram1D * _h_Z_jet1_dR;
+    AIDA::IHistogram1D * _h_lepton_pT;
+    AIDA::IHistogram1D * _h_lepton_eta;
+    //@}
+
+  };
+
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<MC_TVT1960_ZJETS> plugin_MC_TVT1960_ZJETS;
+  
+}

Modified: trunk/src/Analyses/Makefile.am
==============================================================================
--- trunk/src/Analyses/Makefile.am	Wed Sep  2 09:03:31 2009	(r1802)
+++ trunk/src/Analyses/Makefile.am	Wed Sep  2 11:16:20 2009	(r1803)
@@ -1,5 +1,110 @@
-SUBDIRS = Example RHIC CDF D0 SPS LEP HERA RHIC MC Misc
-
 ## Flags for building all plugins
 AM_LDFLAGS = $(LDFLAGS) -module -L$(FASTJETLIBPATH)
 LIBS = -lCDFConesPlugin -lSISConePlugin -lsiscone -lJadePlugin
+lib_LTLIBRARIES =
+
+
+lib_LTLIBRARIES += RivetExampleAnalyses.la
+RivetExampleAnalyses_la_SOURCES = \
+    ExampleAnalysis.cc \
+    ExampleTree.cc
+
+
+lib_LTLIBRARIES += RivetCDFAnalyses.la
+RivetCDFAnalyses_la_SOURCES = \
+    CDF_1988_S1865951.cc \
+    CDF_1990_S2089246.cc \
+    CDF_1994_S2952106.cc \
+    CDF_2000_S4155203.cc \
+    CDF_2001_S4751469.cc \
+    CDF_2002_S4796047.cc \
+    CDF_2004_S5839831.cc \
+    CDF_2005_S6080774.cc \
+    CDF_2005_S6217184.cc \
+    CDF_2006_S6450792.cc \
+    CDF_2006_S6653332.cc \
+    CDF_2007_S7057202.cc \
+    CDF_2008_S7541902.cc \
+    CDF_2008_NOTE_9351.cc \
+    CDF_2008_LEADINGJETS.cc \
+    CDF_2008_S7540469.cc \
+    CDF_2008_S7782535.cc \
+    CDF_2008_S7828950.cc \
+    CDF_2008_S8093652.cc \
+    CDF_2008_S8095620.cc \
+    CDF_2009_S8233977.cc
+
+
+lib_LTLIBRARIES += RivetD0Analyses.la
+RivetD0Analyses_la_SOURCES = \
+    D0_1996_S3214044.cc \
+    D0_1996_S3324664.cc \
+    D0_2001_S4674421.cc \
+    D0_2004_S5992206.cc \
+    D0_2006_S6438750.cc \
+    D0_2007_S7075677.cc \
+    D0_2008_S6879055.cc \
+    D0_2008_S7554427.cc \
+    D0_2008_S7662670.cc \
+    D0_2008_S7719523.cc \
+    D0_2008_S7837160.cc \
+    D0_2008_S7863608.cc \
+    D0_2009_S8202443.cc \
+    D0_2009_S8320160.cc \
+    D0_2009_S8349509.cc
+
+
+lib_LTLIBRARIES += RivetHERAAnalyses.la
+RivetHERAAnalyses_la_SOURCES = \
+    H1_1994_S2919893.cc \
+    H1_1995_S3167097.cc \
+    H1_2000_S4129130.cc \
+    ZEUS_2001_S4815815.cc
+
+
+lib_LTLIBRARIES += RivetLEPAnalyses.la
+RivetLEPAnalyses_la_SOURCES = \
+    ALEPH_1991_S2435284.cc \
+    ALEPH_1996_S3486095.cc \
+    DELPHI_1995_S3137023.cc \
+    DELPHI_1996_S3430090.cc \
+    DELPHI_2002_069_CONF_603.cc \
+    DELPHI_2003_WUD_03_11.cc \
+    OPAL_1998_S3780481.cc
+    #OPAL_2004_S6132243.cc 
+
+
+lib_LTLIBRARIES += RivetRHICAnalyses.la
+RivetRHICAnalyses_la_SOURCES = \
+    STAR_2006_S6870392.cc \
+    STAR_2008_S7993412.cc
+    #STAR_2009_UE_HELEN.cc
+
+
+lib_LTLIBRARIES += RivetSPSAnalyses.la
+RivetSPSAnalyses_la_SOURCES = \
+    UA1_1990_S2044935.cc \
+    UA5_1982_S875503.cc \
+    UA5_1986_S1583476.cc \
+    UA5_1988_S1867512.cc \
+    UA5_1989_S1926373.cc
+
+
+lib_LTLIBRARIES += RivetMiscAnalyses.la
+RivetMiscAnalyses_la_SOURCES = \
+    PDG_Hadron_Multiplicities.cc \
+    PDG_Hadron_Multiplicities_Ratios.cc \
+    SFM_1984_S1178091.cc \
+    E735_1998_S3905616.cc \
+    JADE_OPAL_2000_S4300807.cc
+
+
+lib_LTLIBRARIES += RivetMCAnalyses.la
+RivetMCAnalyses_la_SOURCES = \
+    MC_JetAnalysis.cc \
+    MC_TVT1960_PHOTONJETS.cc \
+    MC_TVT1960_ZJETS.cc \
+    MC_LHC_LEADINGJETS.cc \
+    MC_LHC_DIJET.cc \
+    MC_LHC_WANALYSIS.cc \
+    MC_LHC_ZANALYSIS.cc

Copied: trunk/src/Analyses/OPAL_1998_S3780481.cc (from r1802, trunk/src/Analyses/LEP/OPAL_1998_S3780481.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/OPAL_1998_S3780481.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/OPAL_1998_S3780481.cc)
@@ -0,0 +1,196 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/InitialQuarks.hh"
+
+namespace Rivet {
+
+
+  /// @brief OPAL flavour dependent fragmentation paper
+  /// @author Hendrik Hoeth
+  class OPAL_1998_S3780481 : public Analysis {
+  public:
+    
+    /// Constructor
+    OPAL_1998_S3780481() 
+      : Analysis("OPAL_1998_S3780481")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      addProjection(ChargedFinalState(), "FS");
+      addProjection(InitialQuarks(), "IQF");
+      _weightedTotalPartNum = 0;
+      _SumOfudsWeights = 0;
+      _SumOfcWeights = 0;
+      _SumOfbWeights = 0;
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    // Classify the event by primary quark flavour and fill the flavour-tagged
+    // scaled-momentum and multiplicity histograms.
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 4 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed ncharged cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed ncharged cut" << endl;
+      
+      // Get event weight for histo filling
+      const double weight = e.weight();
+      _weightedTotalPartNum += numParticles * weight;
+      
+      // Get beams and average beam momentum
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+      
+      int flavour = 0;
+      const InitialQuarks& iqf = applyProjection<InitialQuarks>(e, "IQF");
+      
+      // If we only have two quarks (qqbar), just take the flavour.
+      // If we have more than two quarks, look for the highest energetic q-qbar pair.
+      if (iqf.particles().size() == 2) {
+        flavour = abs( iqf.particles().front().pdgId() );
+      }
+      else {
+        map<int, double> quarkmap;
+        foreach (const Particle& p, iqf.particles()) {
+          if (quarkmap[p.pdgId()] < p.momentum().E()) {
+            quarkmap[p.pdgId()] = p.momentum().E();
+          }
+        }
+        double maxenergy = 0.;
+        for (int i = 1; i <= 5; ++i) {
+          if (quarkmap[i]+quarkmap[-i] > maxenergy) {
+            flavour = i;
+            // BUGFIX: record the new maximum energy -- previously maxenergy
+            // was never updated, so the last flavour with any energy always
+            // won rather than the most energetic q-qbar pair.
+            maxenergy = quarkmap[i]+quarkmap[-i];
+          }
+        }
+      }
+      
+      // Accumulate the per-flavour weight sums used for normalisation
+      switch (flavour) {
+      case 1:
+      case 2:
+      case 3:
+        _SumOfudsWeights += weight;
+        break;
+      case 4:
+        _SumOfcWeights += weight;
+        break;
+      case 5:
+        _SumOfbWeights += weight;
+        break;
+      }
+      
+      foreach (const Particle& p, fs.particles()) {
+        const double xp = p.momentum().vector3().mod()/meanBeamMom;
+        const double logxp = -std::log(xp);
+        _histXpall->fill(xp, weight);
+        _histLogXpall->fill(logxp, weight);
+        _histMultiChargedall->fill(_histMultiChargedall->binMean(0), weight);
+        switch (flavour) {
+          /// @todo Use PDG code enums
+        case 1:
+        case 2:
+        case 3:
+          _histXpuds->fill(xp, weight);
+          _histLogXpuds->fill(logxp, weight);
+          _histMultiChargeduds->fill(_histMultiChargeduds->binMean(0), weight);
+          break;
+        case 4:
+          _histXpc->fill(xp, weight);
+          _histLogXpc->fill(logxp, weight);
+          _histMultiChargedc->fill(_histMultiChargedc->binMean(0), weight);
+          break;
+        case 5:
+          _histXpb->fill(xp, weight);
+          _histLogXpb->fill(logxp, weight);
+          _histMultiChargedb->fill(_histMultiChargedb->binMean(0), weight);
+          break;
+        }
+      }
+      
+    }
+    
+    
+    void init() {
+      _histXpuds           = bookHistogram1D(1, 1, 1);
+      _histXpc             = bookHistogram1D(2, 1, 1);
+      _histXpb             = bookHistogram1D(3, 1, 1);
+      _histXpall           = bookHistogram1D(4, 1, 1);
+      _histLogXpuds        = bookHistogram1D(5, 1, 1);
+      _histLogXpc          = bookHistogram1D(6, 1, 1);
+      _histLogXpb          = bookHistogram1D(7, 1, 1);
+      _histLogXpall        = bookHistogram1D(8, 1, 1);
+      _histMultiChargeduds = bookHistogram1D(9, 1, 1);
+      _histMultiChargedc   = bookHistogram1D(9, 1, 2);
+      _histMultiChargedb   = bookHistogram1D(9, 1, 3);
+      _histMultiChargedall = bookHistogram1D(9, 1, 4);
+    }
+    
+    
+    /// Finalize
+    void finalize() {
+      const double avgNumParts = _weightedTotalPartNum / sumOfWeights();
+      normalize(_histXpuds    , avgNumParts);
+      normalize(_histXpc      , avgNumParts);
+      normalize(_histXpb      , avgNumParts);
+      normalize(_histXpall    , avgNumParts);
+      normalize(_histLogXpuds , avgNumParts);
+      normalize(_histLogXpc   , avgNumParts);
+      normalize(_histLogXpb   , avgNumParts);
+      normalize(_histLogXpall , avgNumParts);
+      
+      scale(_histMultiChargeduds, 1.0/_SumOfudsWeights);
+      scale(_histMultiChargedc  , 1.0/_SumOfcWeights);
+      scale(_histMultiChargedb  , 1.0/_SumOfbWeights);
+      scale(_histMultiChargedall, 1.0/sumOfWeights());
+    }
+    
+    //@}
+
+
+  private:
+
+    /// Store the weighted sums of numbers of charged / charged+neutral
+    /// particles - used to calculate average number of particles for the 
+    /// inclusive single particle distributions' normalisations.
+    double _weightedTotalPartNum;
+
+    double _SumOfudsWeights;
+    double _SumOfcWeights;
+    double _SumOfbWeights;
+
+    AIDA::IHistogram1D *_histXpuds;
+    AIDA::IHistogram1D *_histXpc;
+    AIDA::IHistogram1D *_histXpb;
+    AIDA::IHistogram1D *_histXpall;
+    AIDA::IHistogram1D *_histLogXpuds;
+    AIDA::IHistogram1D *_histLogXpc;
+    AIDA::IHistogram1D *_histLogXpb;
+    AIDA::IHistogram1D *_histLogXpall;
+    AIDA::IHistogram1D *_histMultiChargeduds;
+    AIDA::IHistogram1D *_histMultiChargedc;
+    AIDA::IHistogram1D *_histMultiChargedb;
+    AIDA::IHistogram1D *_histMultiChargedall;
+
+    //@}
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<OPAL_1998_S3780481> plugin_OPAL_1998_S3780481;
+  
+}

Copied: trunk/src/Analyses/OPAL_2004_S6132243.cc (from r1802, trunk/src/Analyses/LEP/OPAL_2004_S6132243.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/OPAL_2004_S6132243.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/LEP/OPAL_2004_S6132243.cc)
@@ -0,0 +1,22 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+
+namespace Rivet {
+
+
+  /// @brief Placeholder (stub) analysis for OPAL paper S6132243.
+  /// All hooks are intentionally empty until the analysis is implemented.
+  class OPAL_2004_S6132243 : public Analysis { 
+  public:
+
+    /// Constructor: register this analysis under its SPIRES-style name.
+    /// NOTE: the access specifier is required -- class members default to
+    /// private, which would stop AnalysisBuilder from constructing the
+    /// object and the framework from calling the hooks below.
+    OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243") { }
+    /// Book histograms (none yet -- stub).
+    void init() { }
+    /// Per-event analysis (no-op stub).
+    void analyze(const Event & event) { }
+    /// Finalise histograms (no-op stub).
+    void finalize() { }
+
+  };
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<OPAL_2004_S6132243> plugin_OPAL_2004_S6132243;
+
+}

Copied: trunk/src/Analyses/PDG_Hadron_Multiplicities.cc (from r1802, trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/PDG_Hadron_Multiplicities.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities.cc)
@@ -0,0 +1,835 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/UnstableFinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief Implementation of PDG hadron multiplicities
+  /// @author Hendrik Hoeth
+  class PDG_HADRON_MULTIPLICITIES : public Analysis {
+  public:
+    
+    /// Constructor: declare the expected beam particles and register the
+    /// projections that analyze() looks up by name ("Beams", "FS", "UFS").
+    PDG_HADRON_MULTIPLICITIES() 
+      : Analysis("PDG_HADRON_MULTIPLICITIES")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      // Charged final state: used only for the leptonic-event veto.
+      addProjection(ChargedFinalState(), "FS");
+      // Unstable final state: source of the hadron species counted below.
+      addProjection(UnstableFinalState(), "UFS");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Count per-species hadron multiplicities, binned into four
+    /// centre-of-mass energy samples (~10, ~32, ~91 and 130-200 GeV).
+    /// Each species has a single-bin histogram which is filled at its bin
+    /// centre with the event weight, so the bin accumulates the weighted
+    /// particle count; finalize() divides by the weight sum to get means.
+    void analyze(const Event& e) {
+      // Veto leptonic events by requiring a minimum number of charged FS
+      // particles (the cut actually applied below is numParticles >= 2).
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+      
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+
+      // Get event weight for histo filling
+      const double weight = e.weight();
+
+      // Get beams and average beam momentum; 2*meanBeamMom is used below as
+      // a proxy for the collision energy when selecting the energy sample.
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+
+      // Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+
+
+      // ~10 GeV sample: 2*p_beam in [9.5, 10.5] GeV.
+      // Species are matched by |PDG id|, so particle and antiparticle both count.
+      if (2*meanBeamMom >= 9.5 && 2*meanBeamMom <= 10.5) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _hist10MeanMultiPiPlus->fill(_hist10MeanMultiPiPlus->binMean(0), weight);
+                break;
+             case 111:
+                _hist10MeanMultiPi0->fill(_hist10MeanMultiPi0->binMean(0), weight);
+                break;
+             case 321:
+                _hist10MeanMultiKPlus->fill(_hist10MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                // K0_L and K0_S are counted together as "K0".
+                _hist10MeanMultiK0->fill(_hist10MeanMultiK0->binMean(0), weight);
+                break;
+             case 221:
+                _hist10MeanMultiEta->fill(_hist10MeanMultiEta->binMean(0), weight);
+                break;
+             case 331:
+                _hist10MeanMultiEtaPrime->fill(_hist10MeanMultiEtaPrime->binMean(0), weight);
+                break;
+             case 411:
+                _hist10MeanMultiDPlus->fill(_hist10MeanMultiDPlus->binMean(0), weight);
+                break;
+             case 421:
+                _hist10MeanMultiD0->fill(_hist10MeanMultiD0->binMean(0), weight);
+                break;
+             case 431:
+                _hist10MeanMultiDPlus_s->fill(_hist10MeanMultiDPlus_s->binMean(0), weight);
+                break;
+             case 9010221:
+                _hist10MeanMultiF0_980->fill(_hist10MeanMultiF0_980->binMean(0), weight);
+                break;
+             case 113:
+                _hist10MeanMultiRho770_0->fill(_hist10MeanMultiRho770_0->binMean(0), weight);
+                break;
+             case 223:
+                _hist10MeanMultiOmega782->fill(_hist10MeanMultiOmega782->binMean(0), weight);
+                break;
+             case 323:
+                _hist10MeanMultiKStar892Plus->fill(_hist10MeanMultiKStar892Plus->binMean(0), weight);
+                break;
+             case 313:
+                _hist10MeanMultiKStar892_0->fill(_hist10MeanMultiKStar892_0->binMean(0), weight);
+                break;
+             case 333:
+                _hist10MeanMultiPhi1020->fill(_hist10MeanMultiPhi1020->binMean(0), weight);
+                break;
+             case 413:
+                _hist10MeanMultiDStar2010Plus->fill(_hist10MeanMultiDStar2010Plus->binMean(0), weight);
+                break;
+             case 423:
+                _hist10MeanMultiDStar2007_0->fill(_hist10MeanMultiDStar2007_0->binMean(0), weight);
+                break;
+             case 433:
+                _hist10MeanMultiDStar_s2112Plus->fill(_hist10MeanMultiDStar_s2112Plus->binMean(0), weight);
+                break;
+             case 443:
+                _hist10MeanMultiJPsi1S->fill(_hist10MeanMultiJPsi1S->binMean(0), weight);
+                break;
+             case 225:
+                _hist10MeanMultiF2_1270->fill(_hist10MeanMultiF2_1270->binMean(0), weight);
+                break;
+             case 2212:
+                _hist10MeanMultiP->fill(_hist10MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist10MeanMultiLambda->fill(_hist10MeanMultiLambda->binMean(0), weight);
+                break;
+             case 3212:
+                _hist10MeanMultiSigma0->fill(_hist10MeanMultiSigma0->binMean(0), weight);
+                break;
+             case 3312:
+                _hist10MeanMultiXiMinus->fill(_hist10MeanMultiXiMinus->binMean(0), weight);
+                break;
+             case 2224:
+                _hist10MeanMultiDelta1232PlusPlus->fill(_hist10MeanMultiDelta1232PlusPlus->binMean(0), weight);
+                break;
+             case 3114:
+                // Sigma(1385)- also contributes to the combined +/- histogram.
+                _hist10MeanMultiSigma1385Minus->fill(_hist10MeanMultiSigma1385Minus->binMean(0), weight);
+                _hist10MeanMultiSigma1385PlusMinus->fill(_hist10MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3224:
+                _hist10MeanMultiSigma1385Plus->fill(_hist10MeanMultiSigma1385Plus->binMean(0), weight);
+                _hist10MeanMultiSigma1385PlusMinus->fill(_hist10MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3324:
+                _hist10MeanMultiXi1530_0->fill(_hist10MeanMultiXi1530_0->binMean(0), weight);
+                break;
+             case 3334:
+                _hist10MeanMultiOmegaMinus->fill(_hist10MeanMultiOmegaMinus->binMean(0), weight);
+                break;
+             case 4122:
+                _hist10MeanMultiLambda_c_Plus->fill(_hist10MeanMultiLambda_c_Plus->binMean(0), weight);
+                break;
+             case 4222:
+             case 4112:
+                // Sigma_c++ and Sigma_c0 share one histogram.
+                _hist10MeanMultiSigma_c_PlusPlus_0->fill(_hist10MeanMultiSigma_c_PlusPlus_0->binMean(0), weight);
+                break;
+             case 3124:
+                _hist10MeanMultiLambda1520->fill(_hist10MeanMultiLambda1520->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+      // ~32 GeV sample: 2*p_beam in [29, 35] GeV.
+      if (2*meanBeamMom >= 29 && 2*meanBeamMom <= 35) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _hist32MeanMultiPiPlus->fill(_hist32MeanMultiPiPlus->binMean(0), weight);
+                break;
+             case 111:
+                _hist32MeanMultiPi0->fill(_hist32MeanMultiPi0->binMean(0), weight);
+                break;
+             case 321:
+                _hist32MeanMultiKPlus->fill(_hist32MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                _hist32MeanMultiK0->fill(_hist32MeanMultiK0->binMean(0), weight);
+                break;
+             case 221:
+                _hist32MeanMultiEta->fill(_hist32MeanMultiEta->binMean(0), weight);
+                break;
+             case 331:
+                _hist32MeanMultiEtaPrime->fill(_hist32MeanMultiEtaPrime->binMean(0), weight);
+                break;
+             case 411:
+                _hist32MeanMultiDPlus->fill(_hist32MeanMultiDPlus->binMean(0), weight);
+                break;
+             case 421:
+                _hist32MeanMultiD0->fill(_hist32MeanMultiD0->binMean(0), weight);
+                break;
+             case 431:
+                _hist32MeanMultiDPlus_s->fill(_hist32MeanMultiDPlus_s->binMean(0), weight);
+                break;
+             case 9010221:
+                _hist32MeanMultiF0_980->fill(_hist32MeanMultiF0_980->binMean(0), weight);
+                break;
+             case 113:
+                _hist32MeanMultiRho770_0->fill(_hist32MeanMultiRho770_0->binMean(0), weight);
+                break;
+             case 323:
+                _hist32MeanMultiKStar892Plus->fill(_hist32MeanMultiKStar892Plus->binMean(0), weight);
+                break;
+             case 313:
+                _hist32MeanMultiKStar892_0->fill(_hist32MeanMultiKStar892_0->binMean(0), weight);
+                break;
+             case 333:
+                _hist32MeanMultiPhi1020->fill(_hist32MeanMultiPhi1020->binMean(0), weight);
+                break;
+             case 413:
+                _hist32MeanMultiDStar2010Plus->fill(_hist32MeanMultiDStar2010Plus->binMean(0), weight);
+                break;
+             case 423:
+                _hist32MeanMultiDStar2007_0->fill(_hist32MeanMultiDStar2007_0->binMean(0), weight);
+                break;
+             case 225:
+                _hist32MeanMultiF2_1270->fill(_hist32MeanMultiF2_1270->binMean(0), weight);
+                break;
+             case 325:
+                _hist32MeanMultiK2Star1430Plus->fill(_hist32MeanMultiK2Star1430Plus->binMean(0), weight);
+                break;
+             case 315:
+                _hist32MeanMultiK2Star1430_0->fill(_hist32MeanMultiK2Star1430_0->binMean(0), weight);
+                break;
+             case 2212:
+                _hist32MeanMultiP->fill(_hist32MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist32MeanMultiLambda->fill(_hist32MeanMultiLambda->binMean(0), weight);
+                break;
+             case 3312:
+                _hist32MeanMultiXiMinus->fill(_hist32MeanMultiXiMinus->binMean(0), weight);
+                break;
+             case 3114:
+                _hist32MeanMultiSigma1385Minus->fill(_hist32MeanMultiSigma1385Minus->binMean(0), weight);
+                _hist32MeanMultiSigma1385PlusMinus->fill(_hist32MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3224:
+                _hist32MeanMultiSigma1385Plus->fill(_hist32MeanMultiSigma1385Plus->binMean(0), weight);
+                _hist32MeanMultiSigma1385PlusMinus->fill(_hist32MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3334:
+                _hist32MeanMultiOmegaMinus->fill(_hist32MeanMultiOmegaMinus->binMean(0), weight);
+                break;
+             case 4122:
+                _hist32MeanMultiLambda_c_Plus->fill(_hist32MeanMultiLambda_c_Plus->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+
+
+      // ~91 GeV sample (Z pole): 2*p_beam in [89.5, 91.8] GeV.
+      if (2*meanBeamMom >= 89.5 && 2*meanBeamMom <= 91.8) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _hist91MeanMultiPiPlus->fill(_hist91MeanMultiPiPlus->binMean(0), weight);
+                break;
+             case 111:
+                _hist91MeanMultiPi0->fill(_hist91MeanMultiPi0->binMean(0), weight);
+                break;
+             case 321:
+                _hist91MeanMultiKPlus->fill(_hist91MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                _hist91MeanMultiK0->fill(_hist91MeanMultiK0->binMean(0), weight);
+                break;
+             case 221:
+                _hist91MeanMultiEta->fill(_hist91MeanMultiEta->binMean(0), weight);
+                break;
+             case 331:
+                _hist91MeanMultiEtaPrime->fill(_hist91MeanMultiEtaPrime->binMean(0), weight);
+                break;
+             case 411:
+                _hist91MeanMultiDPlus->fill(_hist91MeanMultiDPlus->binMean(0), weight);
+                break;
+             case 421:
+                _hist91MeanMultiD0->fill(_hist91MeanMultiD0->binMean(0), weight);
+                break;
+             case 431:
+                _hist91MeanMultiDPlus_s->fill(_hist91MeanMultiDPlus_s->binMean(0), weight);
+                break;
+             case 511:
+                // B0_d goes into the B+/B0_d admixture histogram only;
+                // B+ (521) additionally has its own histogram below.
+                _hist91MeanMultiBPlus_B0_d->fill(_hist91MeanMultiBPlus_B0_d->binMean(0), weight);
+                break;
+             case 521:
+                _hist91MeanMultiBPlus_B0_d->fill(_hist91MeanMultiBPlus_B0_d->binMean(0), weight);
+                _hist91MeanMultiBPlus_u->fill(_hist91MeanMultiBPlus_u->binMean(0), weight);
+                break;
+             case 531:
+                _hist91MeanMultiB0_s->fill(_hist91MeanMultiB0_s->binMean(0), weight);
+                break;
+             case 9010221:
+                _hist91MeanMultiF0_980->fill(_hist91MeanMultiF0_980->binMean(0), weight);
+                break;
+             case 9000211:
+                _hist91MeanMultiA0_980Plus->fill(_hist91MeanMultiA0_980Plus->binMean(0), weight);
+                break;
+             case 113:
+                _hist91MeanMultiRho770_0->fill(_hist91MeanMultiRho770_0->binMean(0), weight);
+                break;
+             case 213:
+                _hist91MeanMultiRho770Plus->fill(_hist91MeanMultiRho770Plus->binMean(0), weight);
+                break;
+             case 223:
+                _hist91MeanMultiOmega782->fill(_hist91MeanMultiOmega782->binMean(0), weight);
+                break;
+             case 323:
+                _hist91MeanMultiKStar892Plus->fill(_hist91MeanMultiKStar892Plus->binMean(0), weight);
+                break;
+             case 313:
+                _hist91MeanMultiKStar892_0->fill(_hist91MeanMultiKStar892_0->binMean(0), weight);
+                break;
+             case 333:
+                _hist91MeanMultiPhi1020->fill(_hist91MeanMultiPhi1020->binMean(0), weight);
+                break;
+             case 413:
+                _hist91MeanMultiDStar2010Plus->fill(_hist91MeanMultiDStar2010Plus->binMean(0), weight);
+                break;
+             case 433:
+                _hist91MeanMultiDStar_s2112Plus->fill(_hist91MeanMultiDStar_s2112Plus->binMean(0), weight);
+                break;
+             case 513:
+             case 523:
+             case 533:
+                // All B* flavours share one histogram.
+                _hist91MeanMultiBStar->fill(_hist91MeanMultiBStar->binMean(0), weight);
+                break;
+             case 443:
+                _hist91MeanMultiJPsi1S->fill(_hist91MeanMultiJPsi1S->binMean(0), weight);
+                break;
+             case 100443:
+                _hist91MeanMultiPsi2S->fill(_hist91MeanMultiPsi2S->binMean(0), weight);
+                break;
+             case 553:
+                _hist91MeanMultiUpsilon1S->fill(_hist91MeanMultiUpsilon1S->binMean(0), weight);
+                break;
+             case 20223:
+                _hist91MeanMultiF1_1285->fill(_hist91MeanMultiF1_1285->binMean(0), weight);
+                break;
+             case 20333:
+                _hist91MeanMultiF1_1420->fill(_hist91MeanMultiF1_1420->binMean(0), weight);
+                break;
+             case 445:
+                _hist91MeanMultiChi_c1_3510->fill(_hist91MeanMultiChi_c1_3510->binMean(0), weight);
+                break;
+             case 225:
+                _hist91MeanMultiF2_1270->fill(_hist91MeanMultiF2_1270->binMean(0), weight);
+                break;
+             case 335:
+                _hist91MeanMultiF2Prime1525->fill(_hist91MeanMultiF2Prime1525->binMean(0), weight);
+                break;
+             case 315:
+                _hist91MeanMultiK2Star1430_0->fill(_hist91MeanMultiK2Star1430_0->binMean(0), weight);
+                break;
+             case 515:
+             case 525:
+             case 535:
+                _hist91MeanMultiBStarStar->fill(_hist91MeanMultiBStarStar->binMean(0), weight);
+                break;
+             case 10433:
+             case 20433:
+                _hist91MeanMultiDs1Plus->fill(_hist91MeanMultiDs1Plus->binMean(0), weight);
+                break;
+             case 435:
+                _hist91MeanMultiDs2Plus->fill(_hist91MeanMultiDs2Plus->binMean(0), weight);
+                break;
+             case 2212:
+                _hist91MeanMultiP->fill(_hist91MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist91MeanMultiLambda->fill(_hist91MeanMultiLambda->binMean(0), weight);
+                break;
+             case 3212:
+                _hist91MeanMultiSigma0->fill(_hist91MeanMultiSigma0->binMean(0), weight);
+                break;
+             case 3112:
+                _hist91MeanMultiSigmaMinus->fill(_hist91MeanMultiSigmaMinus->binMean(0), weight);
+                _hist91MeanMultiSigmaPlusMinus->fill(_hist91MeanMultiSigmaPlusMinus->binMean(0), weight);
+                break;
+             case 3222:
+                _hist91MeanMultiSigmaPlus->fill(_hist91MeanMultiSigmaPlus->binMean(0), weight);
+                _hist91MeanMultiSigmaPlusMinus->fill(_hist91MeanMultiSigmaPlusMinus->binMean(0), weight);
+                break;
+             case 3312:
+                _hist91MeanMultiXiMinus->fill(_hist91MeanMultiXiMinus->binMean(0), weight);
+                break;
+             case 2224:
+                _hist91MeanMultiDelta1232PlusPlus->fill(_hist91MeanMultiDelta1232PlusPlus->binMean(0), weight);
+                break;
+             case 3114:
+                _hist91MeanMultiSigma1385Minus->fill(_hist91MeanMultiSigma1385Minus->binMean(0), weight);
+                _hist91MeanMultiSigma1385PlusMinus->fill(_hist91MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3224:
+                _hist91MeanMultiSigma1385Plus->fill(_hist91MeanMultiSigma1385Plus->binMean(0), weight);
+                _hist91MeanMultiSigma1385PlusMinus->fill(_hist91MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3324:
+                _hist91MeanMultiXi1530_0->fill(_hist91MeanMultiXi1530_0->binMean(0), weight);
+                break;
+             case 3334:
+                _hist91MeanMultiOmegaMinus->fill(_hist91MeanMultiOmegaMinus->binMean(0), weight);
+                break;
+             case 4122:
+                _hist91MeanMultiLambda_c_Plus->fill(_hist91MeanMultiLambda_c_Plus->binMean(0), weight);
+                break;
+             case 5122:
+                _hist91MeanMultiLambda_b_0->fill(_hist91MeanMultiLambda_b_0->binMean(0), weight);
+                break;
+             case 3124:
+                _hist91MeanMultiLambda1520->fill(_hist91MeanMultiLambda1520->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+
+
+      // High-energy sample: 2*p_beam in [130, 200] GeV; only the five
+      // most common species are recorded here.
+      if (2*meanBeamMom >= 130 && 2*meanBeamMom <= 200) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _hist165MeanMultiPiPlus->fill(_hist165MeanMultiPiPlus->binMean(0), weight);
+                break;
+             case 321:
+                _hist165MeanMultiKPlus->fill(_hist165MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                _hist165MeanMultiK0->fill(_hist165MeanMultiK0->binMean(0), weight);
+                break;
+             case 2212:
+                _hist165MeanMultiP->fill(_hist165MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist165MeanMultiLambda->fill(_hist165MeanMultiLambda->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+
+    }
+
+
+
+    /// Book one single-bin histogram per (species, energy sample).
+    /// The (id, x, y) triples are presumably the HepData dataset/axis
+    /// coordinates for this analysis' reference data -- the third index
+    /// distinguishes the energy column; do not change them. Note that some
+    /// species only exist at certain energies, so the lists differ.
+    void init() {
+      // sqrt(s) ~ 10 GeV sample.
+      _hist10MeanMultiPiPlus             = bookHistogram1D( 1, 1, 1);
+      _hist10MeanMultiPi0                = bookHistogram1D( 2, 1, 1);
+      _hist10MeanMultiKPlus              = bookHistogram1D( 3, 1, 1);
+      _hist10MeanMultiK0                 = bookHistogram1D( 4, 1, 1);
+      _hist10MeanMultiEta                = bookHistogram1D( 5, 1, 1);
+      _hist10MeanMultiEtaPrime           = bookHistogram1D( 6, 1, 1);
+      _hist10MeanMultiDPlus              = bookHistogram1D( 7, 1, 1);
+      _hist10MeanMultiD0                 = bookHistogram1D( 8, 1, 1);
+      _hist10MeanMultiDPlus_s            = bookHistogram1D( 9, 1, 1);
+      _hist10MeanMultiF0_980             = bookHistogram1D(13, 1, 1);
+      _hist10MeanMultiRho770_0           = bookHistogram1D(15, 1, 1);
+      _hist10MeanMultiOmega782           = bookHistogram1D(17, 1, 1);
+      _hist10MeanMultiKStar892Plus       = bookHistogram1D(18, 1, 1);
+      _hist10MeanMultiKStar892_0         = bookHistogram1D(19, 1, 1);
+      _hist10MeanMultiPhi1020            = bookHistogram1D(20, 1, 1);
+      _hist10MeanMultiDStar2010Plus      = bookHistogram1D(21, 1, 1);
+      _hist10MeanMultiDStar2007_0        = bookHistogram1D(22, 1, 1);
+      _hist10MeanMultiDStar_s2112Plus    = bookHistogram1D(23, 1, 1);
+      _hist10MeanMultiJPsi1S             = bookHistogram1D(25, 1, 1);
+      _hist10MeanMultiF2_1270            = bookHistogram1D(31, 1, 1);
+      _hist10MeanMultiP                  = bookHistogram1D(38, 1, 1);
+      _hist10MeanMultiLambda             = bookHistogram1D(39, 1, 1);
+      _hist10MeanMultiSigma0             = bookHistogram1D(40, 1, 1);
+      _hist10MeanMultiXiMinus            = bookHistogram1D(44, 1, 1);
+      _hist10MeanMultiDelta1232PlusPlus  = bookHistogram1D(45, 1, 1);
+      _hist10MeanMultiSigma1385Minus     = bookHistogram1D(46, 1, 1);
+      _hist10MeanMultiSigma1385Plus      = bookHistogram1D(47, 1, 1);
+      _hist10MeanMultiSigma1385PlusMinus = bookHistogram1D(48, 1, 1);
+      _hist10MeanMultiXi1530_0           = bookHistogram1D(49, 1, 1);
+      _hist10MeanMultiOmegaMinus         = bookHistogram1D(50, 1, 1);
+      _hist10MeanMultiLambda_c_Plus      = bookHistogram1D(51, 1, 1);
+      _hist10MeanMultiSigma_c_PlusPlus_0 = bookHistogram1D(53, 1, 1);
+      _hist10MeanMultiLambda1520         = bookHistogram1D(54, 1, 1);
+
+      // sqrt(s) ~ 32 GeV sample.
+      _hist32MeanMultiPiPlus             = bookHistogram1D( 1, 1, 2);
+      _hist32MeanMultiPi0                = bookHistogram1D( 2, 1, 2);
+      _hist32MeanMultiKPlus              = bookHistogram1D( 3, 1, 2);
+      _hist32MeanMultiK0                 = bookHistogram1D( 4, 1, 2);
+      _hist32MeanMultiEta                = bookHistogram1D( 5, 1, 2);
+      _hist32MeanMultiEtaPrime           = bookHistogram1D( 6, 1, 2);
+      _hist32MeanMultiDPlus              = bookHistogram1D( 7, 1, 2);
+      _hist32MeanMultiD0                 = bookHistogram1D( 8, 1, 2);
+      _hist32MeanMultiDPlus_s            = bookHistogram1D( 9, 1, 2);
+      _hist32MeanMultiF0_980             = bookHistogram1D(13, 1, 2);
+      _hist32MeanMultiRho770_0           = bookHistogram1D(15, 1, 2);
+      _hist32MeanMultiKStar892Plus       = bookHistogram1D(18, 1, 2);
+      _hist32MeanMultiKStar892_0         = bookHistogram1D(19, 1, 2);
+      _hist32MeanMultiPhi1020            = bookHistogram1D(20, 1, 2);
+      _hist32MeanMultiDStar2010Plus      = bookHistogram1D(21, 1, 2);
+      _hist32MeanMultiDStar2007_0        = bookHistogram1D(22, 1, 2);
+      _hist32MeanMultiF2_1270            = bookHistogram1D(31, 1, 2);
+      _hist32MeanMultiK2Star1430Plus     = bookHistogram1D(33, 1, 1);
+      _hist32MeanMultiK2Star1430_0       = bookHistogram1D(34, 1, 1);
+      _hist32MeanMultiP                  = bookHistogram1D(38, 1, 2);
+      _hist32MeanMultiLambda             = bookHistogram1D(39, 1, 2);
+      _hist32MeanMultiXiMinus            = bookHistogram1D(44, 1, 2);
+      _hist32MeanMultiSigma1385Minus     = bookHistogram1D(46, 1, 2);
+      _hist32MeanMultiSigma1385Plus      = bookHistogram1D(47, 1, 2);
+      _hist32MeanMultiSigma1385PlusMinus = bookHistogram1D(48, 1, 2);
+      _hist32MeanMultiOmegaMinus         = bookHistogram1D(50, 1, 2);
+      _hist32MeanMultiLambda_c_Plus      = bookHistogram1D(51, 1, 2);
+
+      // sqrt(s) ~ 91 GeV (Z pole) sample -- the richest species list.
+      _hist91MeanMultiPiPlus             = bookHistogram1D( 1, 1, 3);
+      _hist91MeanMultiPi0                = bookHistogram1D( 2, 1, 3);
+      _hist91MeanMultiKPlus              = bookHistogram1D( 3, 1, 3);
+      _hist91MeanMultiK0                 = bookHistogram1D( 4, 1, 3);
+      _hist91MeanMultiEta                = bookHistogram1D( 5, 1, 3);
+      _hist91MeanMultiEtaPrime           = bookHistogram1D( 6, 1, 3);
+      _hist91MeanMultiDPlus              = bookHistogram1D( 7, 1, 3);
+      _hist91MeanMultiD0                 = bookHistogram1D( 8, 1, 3);
+      _hist91MeanMultiDPlus_s            = bookHistogram1D( 9, 1, 3);
+      _hist91MeanMultiBPlus_B0_d         = bookHistogram1D(10, 1, 1);
+      _hist91MeanMultiBPlus_u            = bookHistogram1D(11, 1, 1);
+      _hist91MeanMultiB0_s               = bookHistogram1D(12, 1, 1);
+      _hist91MeanMultiF0_980             = bookHistogram1D(13, 1, 3);
+      _hist91MeanMultiA0_980Plus         = bookHistogram1D(14, 1, 1);
+      _hist91MeanMultiRho770_0           = bookHistogram1D(15, 1, 3);
+      _hist91MeanMultiRho770Plus         = bookHistogram1D(16, 1, 1);
+      _hist91MeanMultiOmega782           = bookHistogram1D(17, 1, 2);
+      _hist91MeanMultiKStar892Plus       = bookHistogram1D(18, 1, 3);
+      _hist91MeanMultiKStar892_0         = bookHistogram1D(19, 1, 3);
+      _hist91MeanMultiPhi1020            = bookHistogram1D(20, 1, 3);
+      _hist91MeanMultiDStar2010Plus      = bookHistogram1D(21, 1, 3);
+      _hist91MeanMultiDStar_s2112Plus    = bookHistogram1D(23, 1, 2);
+      _hist91MeanMultiBStar              = bookHistogram1D(24, 1, 1);
+      _hist91MeanMultiJPsi1S             = bookHistogram1D(25, 1, 2);
+      _hist91MeanMultiPsi2S              = bookHistogram1D(26, 1, 1);
+      _hist91MeanMultiUpsilon1S          = bookHistogram1D(27, 1, 1);
+      _hist91MeanMultiF1_1285            = bookHistogram1D(28, 1, 1);
+      _hist91MeanMultiF1_1420            = bookHistogram1D(29, 1, 1);
+      _hist91MeanMultiChi_c1_3510        = bookHistogram1D(30, 1, 1);
+      _hist91MeanMultiF2_1270            = bookHistogram1D(31, 1, 3);
+      _hist91MeanMultiF2Prime1525        = bookHistogram1D(32, 1, 1);
+      _hist91MeanMultiK2Star1430_0       = bookHistogram1D(34, 1, 2);
+      _hist91MeanMultiBStarStar          = bookHistogram1D(35, 1, 1);
+      _hist91MeanMultiDs1Plus            = bookHistogram1D(36, 1, 1);
+      _hist91MeanMultiDs2Plus            = bookHistogram1D(37, 1, 1);
+      _hist91MeanMultiP                  = bookHistogram1D(38, 1, 3);
+      _hist91MeanMultiLambda             = bookHistogram1D(39, 1, 3);
+      _hist91MeanMultiSigma0             = bookHistogram1D(40, 1, 2);
+      _hist91MeanMultiSigmaMinus         = bookHistogram1D(41, 1, 1);
+      _hist91MeanMultiSigmaPlus          = bookHistogram1D(42, 1, 1);
+      _hist91MeanMultiSigmaPlusMinus     = bookHistogram1D(43, 1, 1);
+      _hist91MeanMultiXiMinus            = bookHistogram1D(44, 1, 3);
+      _hist91MeanMultiDelta1232PlusPlus  = bookHistogram1D(45, 1, 2);
+      _hist91MeanMultiSigma1385Minus     = bookHistogram1D(46, 1, 3);
+      _hist91MeanMultiSigma1385Plus      = bookHistogram1D(47, 1, 3);
+      _hist91MeanMultiSigma1385PlusMinus = bookHistogram1D(48, 1, 3);
+      _hist91MeanMultiXi1530_0           = bookHistogram1D(49, 1, 2);
+      _hist91MeanMultiOmegaMinus         = bookHistogram1D(50, 1, 3);
+      _hist91MeanMultiLambda_c_Plus      = bookHistogram1D(51, 1, 3);
+      _hist91MeanMultiLambda_b_0         = bookHistogram1D(52, 1, 1);
+      _hist91MeanMultiLambda1520         = bookHistogram1D(54, 1, 2);
+
+      // 130-200 GeV sample.
+      _hist165MeanMultiPiPlus            = bookHistogram1D( 1, 1, 4);
+      _hist165MeanMultiKPlus             = bookHistogram1D( 3, 1, 4);
+      _hist165MeanMultiK0                = bookHistogram1D( 4, 1, 4);
+      _hist165MeanMultiP                 = bookHistogram1D(38, 1, 4);
+      _hist165MeanMultiLambda            = bookHistogram1D(39, 1, 4);
+    }
+
+
+
+    // Finalize
+    void finalize() {
+      scale(_hist10MeanMultiPiPlus            , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiPi0               , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiKPlus             , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiK0                , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiEta               , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiEtaPrime          , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiDPlus             , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiD0                , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiDPlus_s           , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiF0_980            , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiRho770_0          , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiOmega782          , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiKStar892Plus      , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiKStar892_0        , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiPhi1020           , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiDStar2010Plus     , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiDStar2007_0       , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiDStar_s2112Plus   , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiJPsi1S            , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiF2_1270           , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiP                 , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiLambda            , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiSigma0            , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiXiMinus           , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiDelta1232PlusPlus , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiSigma1385Minus    , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiSigma1385Plus     , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiSigma1385PlusMinus, 1.0/sumOfWeights());
+      scale(_hist10MeanMultiXi1530_0          , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiOmegaMinus        , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiLambda_c_Plus     , 1.0/sumOfWeights());
+      scale(_hist10MeanMultiSigma_c_PlusPlus_0, 1.0/sumOfWeights());
+      scale(_hist10MeanMultiLambda1520        , 1.0/sumOfWeights());
+
+      scale(_hist32MeanMultiPiPlus            , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiPi0               , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiKPlus             , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiK0                , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiEta               , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiEtaPrime          , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiDPlus             , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiD0                , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiDPlus_s           , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiF0_980            , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiRho770_0          , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiKStar892Plus      , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiKStar892_0        , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiPhi1020           , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiDStar2010Plus     , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiDStar2007_0       , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiF2_1270           , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiK2Star1430Plus    , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiK2Star1430_0      , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiP                 , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiLambda            , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiXiMinus           , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiSigma1385Minus    , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiSigma1385Plus     , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiSigma1385PlusMinus, 1.0/sumOfWeights());
+      scale(_hist32MeanMultiOmegaMinus        , 1.0/sumOfWeights());
+      scale(_hist32MeanMultiLambda_c_Plus     , 1.0/sumOfWeights());
+
+      scale(_hist91MeanMultiPiPlus            , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiPi0               , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiKPlus             , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiK0                , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiEta               , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiEtaPrime          , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDPlus             , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiD0                , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDPlus_s           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiBPlus_B0_d        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiBPlus_u           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiB0_s              , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiF0_980            , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiA0_980Plus        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiRho770_0          , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiRho770Plus        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiOmega782          , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiKStar892Plus      , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiKStar892_0        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiPhi1020           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDStar2010Plus     , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDStar_s2112Plus   , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiBStar             , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiJPsi1S            , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiPsi2S             , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiUpsilon1S         , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiF1_1285           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiF1_1420           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiChi_c1_3510       , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiF2_1270           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiF2Prime1525       , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiK2Star1430_0      , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiBStarStar         , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDs1Plus           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDs2Plus           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiP                 , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiLambda            , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigma0            , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigmaMinus        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigmaPlus         , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigmaPlusMinus    , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiXiMinus           , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiDelta1232PlusPlus , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigma1385Minus    , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigma1385Plus     , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiSigma1385PlusMinus, 1.0/sumOfWeights());
+      scale(_hist91MeanMultiXi1530_0          , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiOmegaMinus        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiLambda_c_Plus     , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiLambda_b_0        , 1.0/sumOfWeights());
+      scale(_hist91MeanMultiLambda1520        , 1.0/sumOfWeights());
+
+      scale(_hist165MeanMultiPiPlus           , 1.0/sumOfWeights());
+      scale(_hist165MeanMultiKPlus            , 1.0/sumOfWeights());
+      scale(_hist165MeanMultiK0               , 1.0/sumOfWeights());
+      scale(_hist165MeanMultiP                , 1.0/sumOfWeights());
+      scale(_hist165MeanMultiLambda           , 1.0/sumOfWeights());
+    }
+
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    /// Single-valued mean-multiplicity histograms, one per hadron species
+    /// and per centre-of-mass energy sample. The number in each name is the
+    /// approximate sqrt(s) in GeV of the sample (10, 32, 91 or 165); the
+    /// suffix identifies the hadron.
+    //@{
+
+    // sqrt(s) ~ 10 GeV sample
+    AIDA::IHistogram1D *_hist10MeanMultiPiPlus;
+    AIDA::IHistogram1D *_hist10MeanMultiPi0;
+    AIDA::IHistogram1D *_hist10MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist10MeanMultiK0;
+    AIDA::IHistogram1D *_hist10MeanMultiEta;
+    AIDA::IHistogram1D *_hist10MeanMultiEtaPrime;
+    AIDA::IHistogram1D *_hist10MeanMultiDPlus;
+    AIDA::IHistogram1D *_hist10MeanMultiD0;
+    AIDA::IHistogram1D *_hist10MeanMultiDPlus_s;
+    AIDA::IHistogram1D *_hist10MeanMultiF0_980;
+    AIDA::IHistogram1D *_hist10MeanMultiRho770_0;
+    AIDA::IHistogram1D *_hist10MeanMultiOmega782;
+    AIDA::IHistogram1D *_hist10MeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiKStar892_0;
+    AIDA::IHistogram1D *_hist10MeanMultiPhi1020;
+    AIDA::IHistogram1D *_hist10MeanMultiDStar2010Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiDStar2007_0;
+    AIDA::IHistogram1D *_hist10MeanMultiDStar_s2112Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiJPsi1S;
+    AIDA::IHistogram1D *_hist10MeanMultiF2_1270;
+    AIDA::IHistogram1D *_hist10MeanMultiP;
+    AIDA::IHistogram1D *_hist10MeanMultiLambda;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma0;
+    AIDA::IHistogram1D *_hist10MeanMultiXiMinus;
+    AIDA::IHistogram1D *_hist10MeanMultiDelta1232PlusPlus;
+    // Sigma(1385): separate histos for each charge plus a combined +/- one
+    AIDA::IHistogram1D *_hist10MeanMultiSigma1385Minus;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma1385PlusMinus;
+    AIDA::IHistogram1D *_hist10MeanMultiXi1530_0;
+    AIDA::IHistogram1D *_hist10MeanMultiOmegaMinus;
+    AIDA::IHistogram1D *_hist10MeanMultiLambda_c_Plus;
+    // Combined Sigma_c++ / Sigma_c0 count (both PDG codes fill this)
+    AIDA::IHistogram1D *_hist10MeanMultiSigma_c_PlusPlus_0;
+    AIDA::IHistogram1D *_hist10MeanMultiLambda1520;
+
+    // sqrt(s) ~ 32 GeV sample
+    AIDA::IHistogram1D *_hist32MeanMultiPiPlus;
+    AIDA::IHistogram1D *_hist32MeanMultiPi0;
+    AIDA::IHistogram1D *_hist32MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist32MeanMultiK0;
+    AIDA::IHistogram1D *_hist32MeanMultiEta;
+    AIDA::IHistogram1D *_hist32MeanMultiEtaPrime;
+    AIDA::IHistogram1D *_hist32MeanMultiDPlus;
+    AIDA::IHistogram1D *_hist32MeanMultiD0;
+    AIDA::IHistogram1D *_hist32MeanMultiDPlus_s;
+    AIDA::IHistogram1D *_hist32MeanMultiF0_980;
+    AIDA::IHistogram1D *_hist32MeanMultiRho770_0;
+    AIDA::IHistogram1D *_hist32MeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiKStar892_0;
+    AIDA::IHistogram1D *_hist32MeanMultiPhi1020;
+    AIDA::IHistogram1D *_hist32MeanMultiDStar2010Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiDStar2007_0;
+    AIDA::IHistogram1D *_hist32MeanMultiF2_1270;
+    AIDA::IHistogram1D *_hist32MeanMultiK2Star1430Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiK2Star1430_0;
+    AIDA::IHistogram1D *_hist32MeanMultiP;
+    AIDA::IHistogram1D *_hist32MeanMultiLambda;
+    AIDA::IHistogram1D *_hist32MeanMultiXiMinus;
+    AIDA::IHistogram1D *_hist32MeanMultiSigma1385Minus;
+    AIDA::IHistogram1D *_hist32MeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiSigma1385PlusMinus;
+    AIDA::IHistogram1D *_hist32MeanMultiOmegaMinus;
+    AIDA::IHistogram1D *_hist32MeanMultiLambda_c_Plus;
+
+    // sqrt(s) ~ 91 GeV (Z pole) sample — largest species list, incl. b-hadrons
+    AIDA::IHistogram1D *_hist91MeanMultiPiPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiPi0;
+    AIDA::IHistogram1D *_hist91MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiK0;
+    AIDA::IHistogram1D *_hist91MeanMultiEta;
+    AIDA::IHistogram1D *_hist91MeanMultiEtaPrime;
+    AIDA::IHistogram1D *_hist91MeanMultiDPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiD0;
+    AIDA::IHistogram1D *_hist91MeanMultiDPlus_s;
+    AIDA::IHistogram1D *_hist91MeanMultiBPlus_B0_d;
+    AIDA::IHistogram1D *_hist91MeanMultiBPlus_u;
+    AIDA::IHistogram1D *_hist91MeanMultiB0_s;
+    AIDA::IHistogram1D *_hist91MeanMultiF0_980;
+    AIDA::IHistogram1D *_hist91MeanMultiA0_980Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiRho770_0;
+    AIDA::IHistogram1D *_hist91MeanMultiRho770Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiOmega782;
+    AIDA::IHistogram1D *_hist91MeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiKStar892_0;
+    AIDA::IHistogram1D *_hist91MeanMultiPhi1020;
+    AIDA::IHistogram1D *_hist91MeanMultiDStar2010Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiDStar_s2112Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiBStar;
+    AIDA::IHistogram1D *_hist91MeanMultiJPsi1S;
+    AIDA::IHistogram1D *_hist91MeanMultiPsi2S;
+    AIDA::IHistogram1D *_hist91MeanMultiUpsilon1S;
+    AIDA::IHistogram1D *_hist91MeanMultiF1_1285;
+    AIDA::IHistogram1D *_hist91MeanMultiF1_1420;
+    AIDA::IHistogram1D *_hist91MeanMultiChi_c1_3510;
+    AIDA::IHistogram1D *_hist91MeanMultiF2_1270;
+    AIDA::IHistogram1D *_hist91MeanMultiF2Prime1525;
+    AIDA::IHistogram1D *_hist91MeanMultiK2Star1430_0;
+    AIDA::IHistogram1D *_hist91MeanMultiBStarStar;
+    AIDA::IHistogram1D *_hist91MeanMultiDs1Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiDs2Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiP;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma0;
+    AIDA::IHistogram1D *_hist91MeanMultiSigmaMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigmaPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigmaPlusMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiXiMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiDelta1232PlusPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma1385Minus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma1385PlusMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiXi1530_0;
+    AIDA::IHistogram1D *_hist91MeanMultiOmegaMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda_c_Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda_b_0;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda1520;
+
+    // sqrt(s) ~ 165 GeV sample — only the five most abundant species
+    AIDA::IHistogram1D *_hist165MeanMultiPiPlus;
+    AIDA::IHistogram1D *_hist165MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist165MeanMultiK0;
+    AIDA::IHistogram1D *_hist165MeanMultiP;
+    AIDA::IHistogram1D *_hist165MeanMultiLambda;
+
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<PDG_HADRON_MULTIPLICITIES> plugin_PDG_HADRON_MULTIPLICITIES;
+
+}

Copied: trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc (from r1802, trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities_Ratios.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Misc/PDG_Hadron_Multiplicities_Ratios.cc)
@@ -0,0 +1,829 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/UnstableFinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief Implementation of PDG hadron multiplicities as ratios to pi+- multiplicity
+  /// @author Holger Schulz
+  class PDG_HADRON_MULTIPLICITIES_RATIOS : public Analysis {
+  public:
+    
+    /// Constructor: declares this as an e+e- analysis, registers the
+    /// projections used in analyze(), and zeroes the per-energy weighted
+    /// charged-pion counters that serve as the ratio denominators
+    /// (cf. the class @brief: multiplicities as ratios to pi+- multiplicity).
+    PDG_HADRON_MULTIPLICITIES_RATIOS() 
+      : Analysis("PDG_HADRON_MULTIPLICITIES_RATIOS")
+    {
+      setBeams(ELECTRON, POSITRON); 
+      addProjection(Beam(), "Beams");
+      // Charged final state: used only for the leptonic-event veto in analyze()
+      addProjection(ChargedFinalState(), "FS");
+      // Unstable final state: provides the hadron species counted by this analysis
+      addProjection(UnstableFinalState(), "UFS");
+      // Weighted pi+/- counts, one per sqrt(s) window (~10, ~32, ~91, ~165 GeV)
+      _weightedTotalNumPiPlus10 = 0;
+      _weightedTotalNumPiPlus32 = 0;
+      _weightedTotalNumPiPlus91 = 0;
+      _weightedTotalNumPiPlus165 = 0;
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Per-event analysis: classify each unstable-final-state particle by
+    /// |PDG ID| and accumulate weighted multiplicity counts in whichever
+    /// sqrt(s) window (~10, ~32, ~91 or ~165 GeV) matches the beam momenta.
+    void analyze(const Event& e) {
+      // First, veto on leptonic events by requiring at least 2 charged FS particles
+      const FinalState& fs = applyProjection<FinalState>(e, "FS");
+      const size_t numParticles = fs.particles().size();
+
+      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      if (numParticles < 2) {
+        getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
+        vetoEvent;
+      }
+      getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
+
+      // Get event weight for histo filling
+      const double weight = e.weight();
+
+      // Get beams and average beam momentum; 2*meanBeamMom is used below as
+      // the collision energy when selecting the sqrt(s) window
+      const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+                                   beams.second.momentum().vector3().mod() ) / 2.0;
+      getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
+
+      // Final state of unstable particles to get particle spectra
+      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
+
+
+      // sqrt(s) ~ 10 GeV sample
+      if (2*meanBeamMom >= 9.5 && 2*meanBeamMom <= 10.5) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          // abs(): particle and antiparticle are counted together
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                // pi+/-: only the weighted count is accumulated — it is the
+                // normalisation for the multiplicity ratios, not a histogram
+                _weightedTotalNumPiPlus10 += weight;
+                break;
+             case 111:
+                // Each observable is a histogram filled at the centre of bin 0,
+                // so its height accumulates the weighted count.
+                // NOTE(review): assumes bin 0 covers the whole measurement
+                // range — confirm against the bookings in init().
+                _hist10MeanMultiPi0->fill(_hist10MeanMultiPi0->binMean(0), weight);
+                break;
+             case 321:
+                _hist10MeanMultiKPlus->fill(_hist10MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                // Both neutral-kaon codes (K0_L, K0_S) feed the combined K0 histo
+                _hist10MeanMultiK0->fill(_hist10MeanMultiK0->binMean(0), weight);
+                break;
+             case 221:
+                _hist10MeanMultiEta->fill(_hist10MeanMultiEta->binMean(0), weight);
+                break;
+             case 331:
+                _hist10MeanMultiEtaPrime->fill(_hist10MeanMultiEtaPrime->binMean(0), weight);
+                break;
+             case 411:
+                _hist10MeanMultiDPlus->fill(_hist10MeanMultiDPlus->binMean(0), weight);
+                break;
+             case 421:
+                _hist10MeanMultiD0->fill(_hist10MeanMultiD0->binMean(0), weight);
+                break;
+             case 431:
+                _hist10MeanMultiDPlus_s->fill(_hist10MeanMultiDPlus_s->binMean(0), weight);
+                break;
+             case 9010221:
+                _hist10MeanMultiF0_980->fill(_hist10MeanMultiF0_980->binMean(0), weight);
+                break;
+             case 113:
+                _hist10MeanMultiRho770_0->fill(_hist10MeanMultiRho770_0->binMean(0), weight);
+                break;
+             case 223:
+                _hist10MeanMultiOmega782->fill(_hist10MeanMultiOmega782->binMean(0), weight);
+                break;
+             case 323:
+                _hist10MeanMultiKStar892Plus->fill(_hist10MeanMultiKStar892Plus->binMean(0), weight);
+                break;
+             case 313:
+                _hist10MeanMultiKStar892_0->fill(_hist10MeanMultiKStar892_0->binMean(0), weight);
+                break;
+             case 333:
+                _hist10MeanMultiPhi1020->fill(_hist10MeanMultiPhi1020->binMean(0), weight);
+                break;
+             case 413:
+                _hist10MeanMultiDStar2010Plus->fill(_hist10MeanMultiDStar2010Plus->binMean(0), weight);
+                break;
+             case 423:
+                _hist10MeanMultiDStar2007_0->fill(_hist10MeanMultiDStar2007_0->binMean(0), weight);
+                break;
+             case 433:
+                _hist10MeanMultiDStar_s2112Plus->fill(_hist10MeanMultiDStar_s2112Plus->binMean(0), weight);
+                break;
+             case 443:
+                _hist10MeanMultiJPsi1S->fill(_hist10MeanMultiJPsi1S->binMean(0), weight);
+                break;
+             case 225:
+                _hist10MeanMultiF2_1270->fill(_hist10MeanMultiF2_1270->binMean(0), weight);
+                break;
+             case 2212:
+                _hist10MeanMultiP->fill(_hist10MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist10MeanMultiLambda->fill(_hist10MeanMultiLambda->binMean(0), weight);
+                break;
+             case 3212:
+                _hist10MeanMultiSigma0->fill(_hist10MeanMultiSigma0->binMean(0), weight);
+                break;
+             case 3312:
+                _hist10MeanMultiXiMinus->fill(_hist10MeanMultiXiMinus->binMean(0), weight);
+                break;
+             case 2224:
+                _hist10MeanMultiDelta1232PlusPlus->fill(_hist10MeanMultiDelta1232PlusPlus->binMean(0), weight);
+                break;
+             case 3114:
+                // Sigma(1385)-: fills both the charge-specific and combined +/- histos
+                _hist10MeanMultiSigma1385Minus->fill(_hist10MeanMultiSigma1385Minus->binMean(0), weight);
+                _hist10MeanMultiSigma1385PlusMinus->fill(_hist10MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3224:
+                // Sigma(1385)+: likewise double-filled
+                _hist10MeanMultiSigma1385Plus->fill(_hist10MeanMultiSigma1385Plus->binMean(0), weight);
+                _hist10MeanMultiSigma1385PlusMinus->fill(_hist10MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3324:
+                _hist10MeanMultiXi1530_0->fill(_hist10MeanMultiXi1530_0->binMean(0), weight);
+                break;
+             case 3334:
+                _hist10MeanMultiOmegaMinus->fill(_hist10MeanMultiOmegaMinus->binMean(0), weight);
+                break;
+             case 4122:
+                _hist10MeanMultiLambda_c_Plus->fill(_hist10MeanMultiLambda_c_Plus->binMean(0), weight);
+                break;
+             case 4222:
+             case 4112:
+                // Both Sigma_c codes feed one combined histo
+                _hist10MeanMultiSigma_c_PlusPlus_0->fill(_hist10MeanMultiSigma_c_PlusPlus_0->binMean(0), weight);
+                break;
+             case 3124:
+                _hist10MeanMultiLambda1520->fill(_hist10MeanMultiLambda1520->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+      // sqrt(s) ~ 32 GeV sample (same fill pattern as above)
+      if (2*meanBeamMom >= 29 && 2*meanBeamMom <= 35) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _weightedTotalNumPiPlus32 += weight;
+                break;
+             case 111:
+                _hist32MeanMultiPi0->fill(_hist32MeanMultiPi0->binMean(0), weight);
+                break;
+             case 321:
+                _hist32MeanMultiKPlus->fill(_hist32MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                _hist32MeanMultiK0->fill(_hist32MeanMultiK0->binMean(0), weight);
+                break;
+             case 221:
+                _hist32MeanMultiEta->fill(_hist32MeanMultiEta->binMean(0), weight);
+                break;
+             case 331:
+                _hist32MeanMultiEtaPrime->fill(_hist32MeanMultiEtaPrime->binMean(0), weight);
+                break;
+             case 411:
+                _hist32MeanMultiDPlus->fill(_hist32MeanMultiDPlus->binMean(0), weight);
+                break;
+             case 421:
+                _hist32MeanMultiD0->fill(_hist32MeanMultiD0->binMean(0), weight);
+                break;
+             case 431:
+                _hist32MeanMultiDPlus_s->fill(_hist32MeanMultiDPlus_s->binMean(0), weight);
+                break;
+             case 9010221:
+                _hist32MeanMultiF0_980->fill(_hist32MeanMultiF0_980->binMean(0), weight);
+                break;
+             case 113:
+                _hist32MeanMultiRho770_0->fill(_hist32MeanMultiRho770_0->binMean(0), weight);
+                break;
+             case 323:
+                _hist32MeanMultiKStar892Plus->fill(_hist32MeanMultiKStar892Plus->binMean(0), weight);
+                break;
+             case 313:
+                _hist32MeanMultiKStar892_0->fill(_hist32MeanMultiKStar892_0->binMean(0), weight);
+                break;
+             case 333:
+                _hist32MeanMultiPhi1020->fill(_hist32MeanMultiPhi1020->binMean(0), weight);
+                break;
+             case 413:
+                _hist32MeanMultiDStar2010Plus->fill(_hist32MeanMultiDStar2010Plus->binMean(0), weight);
+                break;
+             case 423:
+                _hist32MeanMultiDStar2007_0->fill(_hist32MeanMultiDStar2007_0->binMean(0), weight);
+                break;
+             case 225:
+                _hist32MeanMultiF2_1270->fill(_hist32MeanMultiF2_1270->binMean(0), weight);
+                break;
+             case 325:
+                _hist32MeanMultiK2Star1430Plus->fill(_hist32MeanMultiK2Star1430Plus->binMean(0), weight);
+                break;
+             case 315:
+                _hist32MeanMultiK2Star1430_0->fill(_hist32MeanMultiK2Star1430_0->binMean(0), weight);
+                break;
+             case 2212:
+                _hist32MeanMultiP->fill(_hist32MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist32MeanMultiLambda->fill(_hist32MeanMultiLambda->binMean(0), weight);
+                break;
+             case 3312:
+                _hist32MeanMultiXiMinus->fill(_hist32MeanMultiXiMinus->binMean(0), weight);
+                break;
+             case 3114:
+                _hist32MeanMultiSigma1385Minus->fill(_hist32MeanMultiSigma1385Minus->binMean(0), weight);
+                _hist32MeanMultiSigma1385PlusMinus->fill(_hist32MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3224:
+                _hist32MeanMultiSigma1385Plus->fill(_hist32MeanMultiSigma1385Plus->binMean(0), weight);
+                _hist32MeanMultiSigma1385PlusMinus->fill(_hist32MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3334:
+                _hist32MeanMultiOmegaMinus->fill(_hist32MeanMultiOmegaMinus->binMean(0), weight);
+                break;
+             case 4122:
+                _hist32MeanMultiLambda_c_Plus->fill(_hist32MeanMultiLambda_c_Plus->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+
+
+      // sqrt(s) ~ 91 GeV (Z pole) sample — widest species list, incl. b-hadrons
+      if (2*meanBeamMom >= 89.5 && 2*meanBeamMom <= 91.8) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _weightedTotalNumPiPlus91 += weight;
+                break;
+             case 111:
+                _hist91MeanMultiPi0->fill(_hist91MeanMultiPi0->binMean(0), weight);
+                break;
+             case 321:
+                _hist91MeanMultiKPlus->fill(_hist91MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                _hist91MeanMultiK0->fill(_hist91MeanMultiK0->binMean(0), weight);
+                break;
+             case 221:
+                _hist91MeanMultiEta->fill(_hist91MeanMultiEta->binMean(0), weight);
+                break;
+             case 331:
+                _hist91MeanMultiEtaPrime->fill(_hist91MeanMultiEtaPrime->binMean(0), weight);
+                break;
+             case 411:
+                _hist91MeanMultiDPlus->fill(_hist91MeanMultiDPlus->binMean(0), weight);
+                break;
+             case 421:
+                _hist91MeanMultiD0->fill(_hist91MeanMultiD0->binMean(0), weight);
+                break;
+             case 431:
+                _hist91MeanMultiDPlus_s->fill(_hist91MeanMultiDPlus_s->binMean(0), weight);
+                break;
+             case 511:
+                _hist91MeanMultiBPlus_B0_d->fill(_hist91MeanMultiBPlus_B0_d->binMean(0), weight);
+                break;
+             case 521:
+                // B+ contributes both to the combined B+/B0_d histo and its own
+                _hist91MeanMultiBPlus_B0_d->fill(_hist91MeanMultiBPlus_B0_d->binMean(0), weight);
+                _hist91MeanMultiBPlus_u->fill(_hist91MeanMultiBPlus_u->binMean(0), weight);
+                break;
+             case 531:
+                _hist91MeanMultiB0_s->fill(_hist91MeanMultiB0_s->binMean(0), weight);
+                break;
+             case 9010221:
+                _hist91MeanMultiF0_980->fill(_hist91MeanMultiF0_980->binMean(0), weight);
+                break;
+             case 9000211:
+                _hist91MeanMultiA0_980Plus->fill(_hist91MeanMultiA0_980Plus->binMean(0), weight);
+                break;
+             case 113:
+                _hist91MeanMultiRho770_0->fill(_hist91MeanMultiRho770_0->binMean(0), weight);
+                break;
+             case 213:
+                _hist91MeanMultiRho770Plus->fill(_hist91MeanMultiRho770Plus->binMean(0), weight);
+                break;
+             case 223:
+                _hist91MeanMultiOmega782->fill(_hist91MeanMultiOmega782->binMean(0), weight);
+                break;
+             case 323:
+                _hist91MeanMultiKStar892Plus->fill(_hist91MeanMultiKStar892Plus->binMean(0), weight);
+                break;
+             case 313:
+                _hist91MeanMultiKStar892_0->fill(_hist91MeanMultiKStar892_0->binMean(0), weight);
+                break;
+             case 333:
+                _hist91MeanMultiPhi1020->fill(_hist91MeanMultiPhi1020->binMean(0), weight);
+                break;
+             case 413:
+                _hist91MeanMultiDStar2010Plus->fill(_hist91MeanMultiDStar2010Plus->binMean(0), weight);
+                break;
+             case 433:
+                _hist91MeanMultiDStar_s2112Plus->fill(_hist91MeanMultiDStar_s2112Plus->binMean(0), weight);
+                break;
+             case 513:
+             case 523:
+             case 533:
+                // All three B* codes are counted in one combined histo
+                _hist91MeanMultiBStar->fill(_hist91MeanMultiBStar->binMean(0), weight);
+                break;
+             case 443:
+                _hist91MeanMultiJPsi1S->fill(_hist91MeanMultiJPsi1S->binMean(0), weight);
+                break;
+             case 100443:
+                _hist91MeanMultiPsi2S->fill(_hist91MeanMultiPsi2S->binMean(0), weight);
+                break;
+             case 553:
+                _hist91MeanMultiUpsilon1S->fill(_hist91MeanMultiUpsilon1S->binMean(0), weight);
+                break;
+             case 20223:
+                _hist91MeanMultiF1_1285->fill(_hist91MeanMultiF1_1285->binMean(0), weight);
+                break;
+             case 20333:
+                _hist91MeanMultiF1_1420->fill(_hist91MeanMultiF1_1420->binMean(0), weight);
+                break;
+             case 445:
+                _hist91MeanMultiChi_c1_3510->fill(_hist91MeanMultiChi_c1_3510->binMean(0), weight);
+                break;
+             case 225:
+                _hist91MeanMultiF2_1270->fill(_hist91MeanMultiF2_1270->binMean(0), weight);
+                break;
+             case 335:
+                _hist91MeanMultiF2Prime1525->fill(_hist91MeanMultiF2Prime1525->binMean(0), weight);
+                break;
+             case 315:
+                _hist91MeanMultiK2Star1430_0->fill(_hist91MeanMultiK2Star1430_0->binMean(0), weight);
+                break;
+             case 515:
+             case 525:
+             case 535:
+                // All B** codes combined
+                _hist91MeanMultiBStarStar->fill(_hist91MeanMultiBStarStar->binMean(0), weight);
+                break;
+             case 10433:
+             case 20433:
+                // Both D_s1 codes combined
+                _hist91MeanMultiDs1Plus->fill(_hist91MeanMultiDs1Plus->binMean(0), weight);
+                break;
+             case 435:
+                _hist91MeanMultiDs2Plus->fill(_hist91MeanMultiDs2Plus->binMean(0), weight);
+                break;
+             case 2212:
+                _hist91MeanMultiP->fill(_hist91MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist91MeanMultiLambda->fill(_hist91MeanMultiLambda->binMean(0), weight);
+                break;
+             case 3212:
+                _hist91MeanMultiSigma0->fill(_hist91MeanMultiSigma0->binMean(0), weight);
+                break;
+             case 3112:
+                // Sigma-: fills both the charge-specific and combined +/- histos
+                _hist91MeanMultiSigmaMinus->fill(_hist91MeanMultiSigmaMinus->binMean(0), weight);
+                _hist91MeanMultiSigmaPlusMinus->fill(_hist91MeanMultiSigmaPlusMinus->binMean(0), weight);
+                break;
+             case 3222:
+                _hist91MeanMultiSigmaPlus->fill(_hist91MeanMultiSigmaPlus->binMean(0), weight);
+                _hist91MeanMultiSigmaPlusMinus->fill(_hist91MeanMultiSigmaPlusMinus->binMean(0), weight);
+                break;
+             case 3312:
+                _hist91MeanMultiXiMinus->fill(_hist91MeanMultiXiMinus->binMean(0), weight);
+                break;
+             case 2224:
+                _hist91MeanMultiDelta1232PlusPlus->fill(_hist91MeanMultiDelta1232PlusPlus->binMean(0), weight);
+                break;
+             case 3114:
+                _hist91MeanMultiSigma1385Minus->fill(_hist91MeanMultiSigma1385Minus->binMean(0), weight);
+                _hist91MeanMultiSigma1385PlusMinus->fill(_hist91MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3224:
+                _hist91MeanMultiSigma1385Plus->fill(_hist91MeanMultiSigma1385Plus->binMean(0), weight);
+                _hist91MeanMultiSigma1385PlusMinus->fill(_hist91MeanMultiSigma1385PlusMinus->binMean(0), weight);
+                break;
+             case 3324:
+                _hist91MeanMultiXi1530_0->fill(_hist91MeanMultiXi1530_0->binMean(0), weight);
+                break;
+             case 3334:
+                _hist91MeanMultiOmegaMinus->fill(_hist91MeanMultiOmegaMinus->binMean(0), weight);
+                break;
+             case 4122:
+                _hist91MeanMultiLambda_c_Plus->fill(_hist91MeanMultiLambda_c_Plus->binMean(0), weight);
+                break;
+             case 5122:
+                _hist91MeanMultiLambda_b_0->fill(_hist91MeanMultiLambda_b_0->binMean(0), weight);
+                break;
+             case 3124:
+                _hist91MeanMultiLambda1520->fill(_hist91MeanMultiLambda1520->binMean(0), weight);
+                break;
+          }
+        }
+      }
+
+
+
+      // sqrt(s) ~ 130-200 GeV sample — only the five most abundant species
+      if (2*meanBeamMom >= 130 && 2*meanBeamMom <= 200) {
+        for (ParticleVector::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
+          int id = abs(p->pdgId());
+          switch (id) {
+             case 211:
+                _weightedTotalNumPiPlus165 += weight;
+                break;
+             case 321:
+                _hist165MeanMultiKPlus->fill(_hist165MeanMultiKPlus->binMean(0), weight);
+                break;
+             case 130:
+             case 310:
+                _hist165MeanMultiK0->fill(_hist165MeanMultiK0->binMean(0), weight);
+                break;
+             case 2212:
+                _hist165MeanMultiP->fill(_hist165MeanMultiP->binMean(0), weight);
+                break;
+             case 3122:
+                _hist165MeanMultiLambda->fill(_hist165MeanMultiLambda->binMean(0), weight);
+                break;
+          }
+        }
+      }
+    }
+
+
+
+    /// Book all histograms.  Each is a single-bin histogram whose only bin
+    /// is filled once per observed particle of the given species (see
+    /// analyze()); after the scaling in finalize() it therefore holds a
+    /// mean multiplicity per weighted pi+ count.  The (dataset, x, y)
+    /// arguments are HepData coordinates; the last index selects the
+    /// sqrt(s) column where a species appears in more than one energy range.
+    void init() {
+      // sqrt(s) ~ 10 GeV energy range
+      _hist10MeanMultiPi0                = bookHistogram1D( 2, 1, 1);
+      _hist10MeanMultiKPlus              = bookHistogram1D( 3, 1, 1);
+      _hist10MeanMultiK0                 = bookHistogram1D( 4, 1, 1);
+      _hist10MeanMultiEta                = bookHistogram1D( 5, 1, 1);
+      _hist10MeanMultiEtaPrime           = bookHistogram1D( 6, 1, 1);
+      _hist10MeanMultiDPlus              = bookHistogram1D( 7, 1, 1);
+      _hist10MeanMultiD0                 = bookHistogram1D( 8, 1, 1);
+      _hist10MeanMultiDPlus_s            = bookHistogram1D( 9, 1, 1);
+      _hist10MeanMultiF0_980             = bookHistogram1D(13, 1, 1);
+      _hist10MeanMultiRho770_0           = bookHistogram1D(15, 1, 1);
+      _hist10MeanMultiOmega782           = bookHistogram1D(17, 1, 1);
+      _hist10MeanMultiKStar892Plus       = bookHistogram1D(18, 1, 1);
+      _hist10MeanMultiKStar892_0         = bookHistogram1D(19, 1, 1);
+      _hist10MeanMultiPhi1020            = bookHistogram1D(20, 1, 1);
+      _hist10MeanMultiDStar2010Plus      = bookHistogram1D(21, 1, 1);
+      _hist10MeanMultiDStar2007_0        = bookHistogram1D(22, 1, 1);
+      _hist10MeanMultiDStar_s2112Plus    = bookHistogram1D(23, 1, 1);
+      _hist10MeanMultiJPsi1S             = bookHistogram1D(25, 1, 1);
+      _hist10MeanMultiF2_1270            = bookHistogram1D(31, 1, 1);
+      _hist10MeanMultiP                  = bookHistogram1D(38, 1, 1);
+      _hist10MeanMultiLambda             = bookHistogram1D(39, 1, 1);
+      _hist10MeanMultiSigma0             = bookHistogram1D(40, 1, 1);
+      _hist10MeanMultiXiMinus            = bookHistogram1D(44, 1, 1);
+      _hist10MeanMultiDelta1232PlusPlus  = bookHistogram1D(45, 1, 1);
+      _hist10MeanMultiSigma1385Minus     = bookHistogram1D(46, 1, 1);
+      _hist10MeanMultiSigma1385Plus      = bookHistogram1D(47, 1, 1);
+      _hist10MeanMultiSigma1385PlusMinus = bookHistogram1D(48, 1, 1);
+      _hist10MeanMultiXi1530_0           = bookHistogram1D(49, 1, 1);
+      _hist10MeanMultiOmegaMinus         = bookHistogram1D(50, 1, 1);
+      _hist10MeanMultiLambda_c_Plus      = bookHistogram1D(51, 1, 1);
+      _hist10MeanMultiSigma_c_PlusPlus_0 = bookHistogram1D(53, 1, 1);
+      _hist10MeanMultiLambda1520         = bookHistogram1D(54, 1, 1);
+
+      // sqrt(s) ~ 32 GeV energy range
+      _hist32MeanMultiPi0                = bookHistogram1D( 2, 1, 2);
+      _hist32MeanMultiKPlus              = bookHistogram1D( 3, 1, 2);
+      _hist32MeanMultiK0                 = bookHistogram1D( 4, 1, 2);
+      _hist32MeanMultiEta                = bookHistogram1D( 5, 1, 2);
+      _hist32MeanMultiEtaPrime           = bookHistogram1D( 6, 1, 2);
+      _hist32MeanMultiDPlus              = bookHistogram1D( 7, 1, 2);
+      _hist32MeanMultiD0                 = bookHistogram1D( 8, 1, 2);
+      _hist32MeanMultiDPlus_s            = bookHistogram1D( 9, 1, 2);
+      _hist32MeanMultiF0_980             = bookHistogram1D(13, 1, 2);
+      _hist32MeanMultiRho770_0           = bookHistogram1D(15, 1, 2);
+      _hist32MeanMultiKStar892Plus       = bookHistogram1D(18, 1, 2);
+      _hist32MeanMultiKStar892_0         = bookHistogram1D(19, 1, 2);
+      _hist32MeanMultiPhi1020            = bookHistogram1D(20, 1, 2);
+      _hist32MeanMultiDStar2010Plus      = bookHistogram1D(21, 1, 2);
+      _hist32MeanMultiDStar2007_0        = bookHistogram1D(22, 1, 2);
+      _hist32MeanMultiF2_1270            = bookHistogram1D(31, 1, 2);
+      _hist32MeanMultiK2Star1430Plus     = bookHistogram1D(33, 1, 1);
+      _hist32MeanMultiK2Star1430_0       = bookHistogram1D(34, 1, 1);
+      _hist32MeanMultiP                  = bookHistogram1D(38, 1, 2);
+      _hist32MeanMultiLambda             = bookHistogram1D(39, 1, 2);
+      _hist32MeanMultiXiMinus            = bookHistogram1D(44, 1, 2);
+      _hist32MeanMultiSigma1385Minus     = bookHistogram1D(46, 1, 2);
+      _hist32MeanMultiSigma1385Plus      = bookHistogram1D(47, 1, 2);
+      _hist32MeanMultiSigma1385PlusMinus = bookHistogram1D(48, 1, 2);
+      _hist32MeanMultiOmegaMinus         = bookHistogram1D(50, 1, 2);
+      _hist32MeanMultiLambda_c_Plus      = bookHistogram1D(51, 1, 2);
+
+      // sqrt(s) ~ 91 GeV (Z pole) energy range
+      _hist91MeanMultiPi0                = bookHistogram1D( 2, 1, 3);
+      _hist91MeanMultiKPlus              = bookHistogram1D( 3, 1, 3);
+      _hist91MeanMultiK0                 = bookHistogram1D( 4, 1, 3);
+      _hist91MeanMultiEta                = bookHistogram1D( 5, 1, 3);
+      _hist91MeanMultiEtaPrime           = bookHistogram1D( 6, 1, 3);
+      _hist91MeanMultiDPlus              = bookHistogram1D( 7, 1, 3);
+      _hist91MeanMultiD0                 = bookHistogram1D( 8, 1, 3);
+      _hist91MeanMultiDPlus_s            = bookHistogram1D( 9, 1, 3);
+      _hist91MeanMultiBPlus_B0_d         = bookHistogram1D(10, 1, 1);
+      _hist91MeanMultiBPlus_u            = bookHistogram1D(11, 1, 1);
+      _hist91MeanMultiB0_s               = bookHistogram1D(12, 1, 1);
+      _hist91MeanMultiF0_980             = bookHistogram1D(13, 1, 3);
+      _hist91MeanMultiA0_980Plus         = bookHistogram1D(14, 1, 1);
+      _hist91MeanMultiRho770_0           = bookHistogram1D(15, 1, 3);
+      _hist91MeanMultiRho770Plus         = bookHistogram1D(16, 1, 1);
+      _hist91MeanMultiOmega782           = bookHistogram1D(17, 1, 2);
+      _hist91MeanMultiKStar892Plus       = bookHistogram1D(18, 1, 3);
+      _hist91MeanMultiKStar892_0         = bookHistogram1D(19, 1, 3);
+      _hist91MeanMultiPhi1020            = bookHistogram1D(20, 1, 3);
+      _hist91MeanMultiDStar2010Plus      = bookHistogram1D(21, 1, 3);
+      _hist91MeanMultiDStar_s2112Plus    = bookHistogram1D(23, 1, 2);
+      _hist91MeanMultiBStar              = bookHistogram1D(24, 1, 1);
+      _hist91MeanMultiJPsi1S             = bookHistogram1D(25, 1, 2);
+      _hist91MeanMultiPsi2S              = bookHistogram1D(26, 1, 1);
+      _hist91MeanMultiUpsilon1S          = bookHistogram1D(27, 1, 1);
+      _hist91MeanMultiF1_1285            = bookHistogram1D(28, 1, 1);
+      _hist91MeanMultiF1_1420            = bookHistogram1D(29, 1, 1);
+      _hist91MeanMultiChi_c1_3510        = bookHistogram1D(30, 1, 1);
+      _hist91MeanMultiF2_1270            = bookHistogram1D(31, 1, 3);
+      _hist91MeanMultiF2Prime1525        = bookHistogram1D(32, 1, 1);
+      _hist91MeanMultiK2Star1430_0       = bookHistogram1D(34, 1, 2);
+      _hist91MeanMultiBStarStar          = bookHistogram1D(35, 1, 1);
+      _hist91MeanMultiDs1Plus            = bookHistogram1D(36, 1, 1);
+      _hist91MeanMultiDs2Plus            = bookHistogram1D(37, 1, 1);
+      _hist91MeanMultiP                  = bookHistogram1D(38, 1, 3);
+      _hist91MeanMultiLambda             = bookHistogram1D(39, 1, 3);
+      _hist91MeanMultiSigma0             = bookHistogram1D(40, 1, 2);
+      _hist91MeanMultiSigmaMinus         = bookHistogram1D(41, 1, 1);
+      _hist91MeanMultiSigmaPlus          = bookHistogram1D(42, 1, 1);
+      _hist91MeanMultiSigmaPlusMinus     = bookHistogram1D(43, 1, 1);
+      _hist91MeanMultiXiMinus            = bookHistogram1D(44, 1, 3);
+      _hist91MeanMultiDelta1232PlusPlus  = bookHistogram1D(45, 1, 2);
+      _hist91MeanMultiSigma1385Minus     = bookHistogram1D(46, 1, 3);
+      _hist91MeanMultiSigma1385Plus      = bookHistogram1D(47, 1, 3);
+      _hist91MeanMultiSigma1385PlusMinus = bookHistogram1D(48, 1, 3);
+      _hist91MeanMultiXi1530_0           = bookHistogram1D(49, 1, 2);
+      _hist91MeanMultiOmegaMinus         = bookHistogram1D(50, 1, 3);
+      _hist91MeanMultiLambda_c_Plus      = bookHistogram1D(51, 1, 3);
+      _hist91MeanMultiLambda_b_0         = bookHistogram1D(52, 1, 1);
+      _hist91MeanMultiLambda1520         = bookHistogram1D(54, 1, 2);
+
+      // sqrt(s) ~ 165 GeV energy range (fewer measured species)
+      _hist165MeanMultiKPlus             = bookHistogram1D( 3, 1, 4);
+      _hist165MeanMultiK0                = bookHistogram1D( 4, 1, 4);
+      _hist165MeanMultiP                 = bookHistogram1D(38, 1, 4);
+      _hist165MeanMultiLambda            = bookHistogram1D(39, 1, 4);
+    }
+
+
+
+    /// Finalize: divide each species' weighted yield by the weighted pi+
+    /// count accumulated for the corresponding sqrt(s) range, turning the
+    /// single-bin yields into per-pi+ multiplicity ratios (hence the
+    /// analysis name "..._RATIOS").
+    void finalize() {
+      // sqrt(s) ~ 10 GeV region: normalise by the 10 GeV pi+ count
+      scale(_hist10MeanMultiPi0               , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiKPlus             , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiK0                , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiEta               , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiEtaPrime          , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiDPlus             , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiD0                , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiDPlus_s           , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiF0_980            , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiRho770_0          , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiOmega782          , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiKStar892Plus      , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiKStar892_0        , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiPhi1020           , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiDStar2010Plus     , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiDStar2007_0       , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiDStar_s2112Plus   , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiJPsi1S            , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiF2_1270           , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiP                 , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiLambda            , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiSigma0            , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiXiMinus           , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiDelta1232PlusPlus , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiSigma1385Minus    , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiSigma1385Plus     , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiSigma1385PlusMinus, 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiXi1530_0          , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiOmegaMinus        , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiLambda_c_Plus     , 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiSigma_c_PlusPlus_0, 1.0/_weightedTotalNumPiPlus10);
+      scale(_hist10MeanMultiLambda1520        , 1.0/_weightedTotalNumPiPlus10);
+
+      // sqrt(s) ~ 32 GeV region
+      scale(_hist32MeanMultiPi0               , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiKPlus             , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiK0                , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiEta               , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiEtaPrime          , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiDPlus             , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiD0                , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiDPlus_s           , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiF0_980            , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiRho770_0          , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiKStar892Plus      , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiKStar892_0        , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiPhi1020           , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiDStar2010Plus     , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiDStar2007_0       , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiF2_1270           , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiK2Star1430Plus    , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiK2Star1430_0      , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiP                 , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiLambda            , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiXiMinus           , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiSigma1385Minus    , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiSigma1385Plus     , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiSigma1385PlusMinus, 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiOmegaMinus        , 1.0/_weightedTotalNumPiPlus32);
+      scale(_hist32MeanMultiLambda_c_Plus     , 1.0/_weightedTotalNumPiPlus32);
+
+      // sqrt(s) ~ 91 GeV (Z pole) region
+      scale(_hist91MeanMultiPi0               , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiKPlus             , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiK0                , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiEta               , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiEtaPrime          , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDPlus             , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiD0                , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDPlus_s           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiBPlus_B0_d        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiBPlus_u           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiB0_s              , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiF0_980            , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiA0_980Plus        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiRho770_0          , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiRho770Plus        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiOmega782          , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiKStar892Plus      , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiKStar892_0        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiPhi1020           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDStar2010Plus     , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDStar_s2112Plus   , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiBStar             , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiJPsi1S            , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiPsi2S             , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiUpsilon1S         , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiF1_1285           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiF1_1420           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiChi_c1_3510       , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiF2_1270           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiF2Prime1525       , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiK2Star1430_0      , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiBStarStar         , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDs1Plus           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDs2Plus           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiP                 , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiLambda            , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigma0            , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigmaMinus        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigmaPlus         , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigmaPlusMinus    , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiXiMinus           , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiDelta1232PlusPlus , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigma1385Minus    , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigma1385Plus     , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiSigma1385PlusMinus, 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiXi1530_0          , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiOmegaMinus        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiLambda_c_Plus     , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiLambda_b_0        , 1.0/_weightedTotalNumPiPlus91);
+      scale(_hist91MeanMultiLambda1520        , 1.0/_weightedTotalNumPiPlus91);
+
+      // sqrt(s) ~ 165 GeV region
+      scale(_hist165MeanMultiKPlus            , 1.0/_weightedTotalNumPiPlus165);
+      scale(_hist165MeanMultiK0               , 1.0/_weightedTotalNumPiPlus165);
+      scale(_hist165MeanMultiP                , 1.0/_weightedTotalNumPiPlus165);
+      scale(_hist165MeanMultiLambda           , 1.0/_weightedTotalNumPiPlus165);
+    }
+
+    //@}
+
+
+  private:
+
+    // Weighted pi+ event counts, one per sqrt(s) range; used as the
+    // normalisation denominators in finalize().
+    double _weightedTotalNumPiPlus10;
+    double _weightedTotalNumPiPlus32;
+    double _weightedTotalNumPiPlus91;
+    double _weightedTotalNumPiPlus165;
+
+    // Single-bin mean-multiplicity histograms for the sqrt(s) ~ 10 GeV range.
+    AIDA::IHistogram1D *_hist10MeanMultiPi0;
+    AIDA::IHistogram1D *_hist10MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist10MeanMultiK0;
+    AIDA::IHistogram1D *_hist10MeanMultiEta;
+    AIDA::IHistogram1D *_hist10MeanMultiEtaPrime;
+    AIDA::IHistogram1D *_hist10MeanMultiDPlus;
+    AIDA::IHistogram1D *_hist10MeanMultiD0;
+    AIDA::IHistogram1D *_hist10MeanMultiDPlus_s;
+    AIDA::IHistogram1D *_hist10MeanMultiF0_980;
+    AIDA::IHistogram1D *_hist10MeanMultiRho770_0;
+    AIDA::IHistogram1D *_hist10MeanMultiOmega782;
+    AIDA::IHistogram1D *_hist10MeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiKStar892_0;
+    AIDA::IHistogram1D *_hist10MeanMultiPhi1020;
+    AIDA::IHistogram1D *_hist10MeanMultiDStar2010Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiDStar2007_0;
+    AIDA::IHistogram1D *_hist10MeanMultiDStar_s2112Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiJPsi1S;
+    AIDA::IHistogram1D *_hist10MeanMultiF2_1270;
+    AIDA::IHistogram1D *_hist10MeanMultiP;
+    AIDA::IHistogram1D *_hist10MeanMultiLambda;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma0;
+    AIDA::IHistogram1D *_hist10MeanMultiXiMinus;
+    AIDA::IHistogram1D *_hist10MeanMultiDelta1232PlusPlus;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma1385Minus;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma1385PlusMinus;
+    AIDA::IHistogram1D *_hist10MeanMultiXi1530_0;
+    AIDA::IHistogram1D *_hist10MeanMultiOmegaMinus;
+    AIDA::IHistogram1D *_hist10MeanMultiLambda_c_Plus;
+    AIDA::IHistogram1D *_hist10MeanMultiSigma_c_PlusPlus_0;
+    AIDA::IHistogram1D *_hist10MeanMultiLambda1520;
+
+    // Single-bin mean-multiplicity histograms for the sqrt(s) ~ 32 GeV range.
+    AIDA::IHistogram1D *_hist32MeanMultiPi0;
+    AIDA::IHistogram1D *_hist32MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist32MeanMultiK0;
+    AIDA::IHistogram1D *_hist32MeanMultiEta;
+    AIDA::IHistogram1D *_hist32MeanMultiEtaPrime;
+    AIDA::IHistogram1D *_hist32MeanMultiDPlus;
+    AIDA::IHistogram1D *_hist32MeanMultiD0;
+    AIDA::IHistogram1D *_hist32MeanMultiDPlus_s;
+    AIDA::IHistogram1D *_hist32MeanMultiF0_980;
+    AIDA::IHistogram1D *_hist32MeanMultiRho770_0;
+    AIDA::IHistogram1D *_hist32MeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiKStar892_0;
+    AIDA::IHistogram1D *_hist32MeanMultiPhi1020;
+    AIDA::IHistogram1D *_hist32MeanMultiDStar2010Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiDStar2007_0;
+    AIDA::IHistogram1D *_hist32MeanMultiF2_1270;
+    AIDA::IHistogram1D *_hist32MeanMultiK2Star1430Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiK2Star1430_0;
+    AIDA::IHistogram1D *_hist32MeanMultiP;
+    AIDA::IHistogram1D *_hist32MeanMultiLambda;
+    AIDA::IHistogram1D *_hist32MeanMultiXiMinus;
+    AIDA::IHistogram1D *_hist32MeanMultiSigma1385Minus;
+    AIDA::IHistogram1D *_hist32MeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_hist32MeanMultiSigma1385PlusMinus;
+    AIDA::IHistogram1D *_hist32MeanMultiOmegaMinus;
+    AIDA::IHistogram1D *_hist32MeanMultiLambda_c_Plus;
+
+    // Single-bin mean-multiplicity histograms for the sqrt(s) ~ 91 GeV range.
+    AIDA::IHistogram1D *_hist91MeanMultiPi0;
+    AIDA::IHistogram1D *_hist91MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiK0;
+    AIDA::IHistogram1D *_hist91MeanMultiEta;
+    AIDA::IHistogram1D *_hist91MeanMultiEtaPrime;
+    AIDA::IHistogram1D *_hist91MeanMultiDPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiD0;
+    AIDA::IHistogram1D *_hist91MeanMultiDPlus_s;
+    AIDA::IHistogram1D *_hist91MeanMultiBPlus_B0_d;
+    AIDA::IHistogram1D *_hist91MeanMultiBPlus_u;
+    AIDA::IHistogram1D *_hist91MeanMultiB0_s;
+    AIDA::IHistogram1D *_hist91MeanMultiF0_980;
+    AIDA::IHistogram1D *_hist91MeanMultiA0_980Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiRho770_0;
+    AIDA::IHistogram1D *_hist91MeanMultiRho770Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiOmega782;
+    AIDA::IHistogram1D *_hist91MeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiKStar892_0;
+    AIDA::IHistogram1D *_hist91MeanMultiPhi1020;
+    AIDA::IHistogram1D *_hist91MeanMultiDStar2010Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiDStar_s2112Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiBStar;
+    AIDA::IHistogram1D *_hist91MeanMultiJPsi1S;
+    AIDA::IHistogram1D *_hist91MeanMultiPsi2S;
+    AIDA::IHistogram1D *_hist91MeanMultiUpsilon1S;
+    AIDA::IHistogram1D *_hist91MeanMultiF1_1285;
+    AIDA::IHistogram1D *_hist91MeanMultiF1_1420;
+    AIDA::IHistogram1D *_hist91MeanMultiChi_c1_3510;
+    AIDA::IHistogram1D *_hist91MeanMultiF2_1270;
+    AIDA::IHistogram1D *_hist91MeanMultiF2Prime1525;
+    AIDA::IHistogram1D *_hist91MeanMultiK2Star1430_0;
+    AIDA::IHistogram1D *_hist91MeanMultiBStarStar;
+    AIDA::IHistogram1D *_hist91MeanMultiDs1Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiDs2Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiP;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma0;
+    AIDA::IHistogram1D *_hist91MeanMultiSigmaMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigmaPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigmaPlusMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiXiMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiDelta1232PlusPlus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma1385Minus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma1385Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiSigma1385PlusMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiXi1530_0;
+    AIDA::IHistogram1D *_hist91MeanMultiOmegaMinus;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda_c_Plus;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda_b_0;
+    AIDA::IHistogram1D *_hist91MeanMultiLambda1520;
+
+    // Single-bin mean-multiplicity histograms for the sqrt(s) ~ 165 GeV range.
+    AIDA::IHistogram1D *_hist165MeanMultiKPlus;
+    AIDA::IHistogram1D *_hist165MeanMultiK0;
+    AIDA::IHistogram1D *_hist165MeanMultiP;
+    AIDA::IHistogram1D *_hist165MeanMultiLambda;
+
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<PDG_HADRON_MULTIPLICITIES_RATIOS> plugin_PDG_HADRON_MULTIPLICITIES_RATIOS;
+
+}

Copied: trunk/src/Analyses/SFM_1984_S1178091.cc (from r1802, trunk/src/Analyses/Misc/SFM_1984_S1178091.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/SFM_1984_S1178091.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/Misc/SFM_1984_S1178091.cc)
@@ -0,0 +1,155 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+  /// @brief Charged multiplicity distributions in inelastic and
+  /// non-single-diffractive (NSD) pp collisions at ISR energies
+  /// (sqrt(s) = 30.4, 44, 53 and 63 GeV).
+  class SFM_1984_S1178091 : public Analysis {
+  public:
+
+    /// Constructor: pp beams, a Beam projection for sqrt(s), and the
+    /// charged final state used for the multiplicity counting.
+    SFM_1984_S1178091() : Analysis("SFM_1984_S1178091") {
+      setBeams(PROTON, PROTON);
+      addProjection(Beam(), "Beam");
+      addProjection(ChargedFinalState(), "FS");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the inelastic (d01) and NSD (d02) multiplicity histograms,
+    /// one per collision energy.
+    void init() {
+      _hist_multiplicity_inel_30 = bookHistogram1D(1, 1, 1);
+      _hist_multiplicity_inel_45 = bookHistogram1D(1, 1, 2);
+      _hist_multiplicity_inel_53 = bookHistogram1D(1, 1, 3);
+      _hist_multiplicity_inel_63 = bookHistogram1D(1, 1, 4);
+      _hist_multiplicity_nsd_30 = bookHistogram1D(2, 1, 1);
+      _hist_multiplicity_nsd_45 = bookHistogram1D(2, 1, 2);
+      _hist_multiplicity_nsd_53 = bookHistogram1D(2, 1, 3);
+      _hist_multiplicity_nsd_63 = bookHistogram1D(2, 1, 4);
+    }
+
+
+    /// Classify the event as diffractive or not, then fill the charged
+    /// multiplicity histograms for the matching collision energy.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
+      const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
+      const size_t numParticles = fs.particles().size();
+
+      // Decide whether event is of diffractive type or not
+      // FIXME: it is not so clear in the paper how this distinction is made.
+      // They seem to require either exactly one particle with Feynman x larger
+      // than 0.8 to call an event diffractive or that there are no tracks
+      // reconstructed in either of the two hemispheres. For the latter
+      // they require in addition also the number of charged particles
+      // to be smaller than 8.
+      int n_left(0), n_right(0), n_large_x(0);
+      foreach (const Particle& p, fs.particles()) {
+        // Calculate the particle's Feynman x
+        const double x_feyn = 2.0 * (p.momentum().pz()/GeV) / sqrtS;
+        if (fabs(x_feyn) > 0.8) n_large_x += 1;
+
+        // Count tracks in the two pseudorapidity hemispheres
+        const double eta = p.momentum().pseudorapidity();
+        if (eta > 0.0) n_right += 1;
+        else if (eta < 0.0) n_left += 1;
+      }
+
+      // Not sure about the "=="
+      /// @todo Numerical precision problem!
+      bool isDiffractive = false;
+      if (n_large_x == 1) isDiffractive = true;
+
+      // FIXME: Not sure about the "== 1", the paper says no charged particle
+      // that was reconstructed so the incoming protons must run down the beam
+      // pipe. Since we look at the complete final state here no particle being
+      // reconstructed should be equal to one particle (proton) in each
+      // hemisphere.  The "< 8" is also not certain.
+      if ((n_left == 1 || n_right == 1) && numParticles < 8) {
+        isDiffractive = true;
+      }
+
+      getLog() << Log::DEBUG << "N_left: " << n_left << ", N_right: "
+               << n_right << ", N_large_x: " << n_large_x << endl;
+
+      // Fill the charged multiplicity distributions.  The inelastic samples
+      // are said to contain also the diffractive events, so the inelastic
+      // histogram is filled for every matched event, while the NSD
+      // (non-single-diffractive) histogram is filled only when the event is
+      // NOT tagged as diffractive.
+      // (Bug fix: the previous code filled the NSD histograms for the
+      // *diffractive* events, i.e. with the selection inverted.)
+      if (fuzzyEquals(sqrtS, 30.4/GeV, 1E-1)) {
+        _hist_multiplicity_inel_30->fill(numParticles, weight);
+        if (!isDiffractive) _hist_multiplicity_nsd_30->fill(numParticles, weight);
+      }
+      else if (fuzzyEquals(sqrtS, 44/GeV, 1E-1)) {
+        _hist_multiplicity_inel_45->fill(numParticles, weight);
+        if (!isDiffractive) _hist_multiplicity_nsd_45->fill(numParticles, weight);
+      }
+      else if (fuzzyEquals(sqrtS, 53/GeV, 1E-1)) {
+        _hist_multiplicity_inel_53->fill(numParticles, weight);
+        if (!isDiffractive) _hist_multiplicity_nsd_53->fill(numParticles, weight);
+      }
+      else if (fuzzyEquals(sqrtS, 63/GeV, 1E-1)) {
+        _hist_multiplicity_inel_63->fill(numParticles, weight);
+        if (!isDiffractive) _hist_multiplicity_nsd_63->fill(numParticles, weight);
+      }
+    }
+
+
+    /// Normalise all multiplicity distributions to unit area.
+    void finalize() {
+      normalize(_hist_multiplicity_inel_30);
+      normalize(_hist_multiplicity_inel_45);
+      normalize(_hist_multiplicity_inel_53);
+      normalize(_hist_multiplicity_inel_63);
+      normalize(_hist_multiplicity_nsd_30);
+      normalize(_hist_multiplicity_nsd_45);
+      normalize(_hist_multiplicity_nsd_53);
+      normalize(_hist_multiplicity_nsd_63);
+    }
+    //@}
+
+
+  private:
+
+    /// @name Histograms (inelastic and NSD, per collision energy)
+    //@{
+    AIDA::IHistogram1D *_hist_multiplicity_inel_30;
+    AIDA::IHistogram1D *_hist_multiplicity_inel_45;
+    AIDA::IHistogram1D *_hist_multiplicity_inel_53;
+    AIDA::IHistogram1D *_hist_multiplicity_inel_63;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_30;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_45;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_53;
+    AIDA::IHistogram1D *_hist_multiplicity_nsd_63;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<SFM_1984_S1178091> plugin_SFM_1984_S1178091;
+
+}

Copied: trunk/src/Analyses/STAR_2006_S6870392.cc (from r1802, trunk/src/Analyses/RHIC/STAR_2006_S6870392.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/STAR_2006_S6870392.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/RHIC/STAR_2006_S6870392.cc)
@@ -0,0 +1,88 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+  /// @brief Inclusive jet cross-section in pp collisions at sqrt(s) = 200 GeV.
+  class STAR_2006_S6870392 : public Analysis {
+  public:
+
+    /// Constructor: pp beams, a final state in |eta| < 2, and CDF midpoint
+    /// jets with R = 0.4, pTmin = 0 and seed threshold 0.5.
+    STAR_2006_S6870392()
+      : Analysis("STAR_2006_S6870392")
+    {
+      setBeams(PROTON, PROTON);
+      FinalState fs(-2.0, 2.0);
+      addProjection(fs, "FS");
+      // R=0.4, pTmin=0, seed_threshold=0.5:
+      /// @todo Presumably this jet alg is wrong...
+      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.4, 0.0, 0.5), "MidpointJets");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the minimum-bias and high-tower jet pT spectra.
+    void init() {
+      _h_jet_pT_MB = bookHistogram1D(1, 1, 1);
+      _h_jet_pT_HT = bookHistogram1D(2, 1, 1);
+    }
+
+    /// Fill the jet pT spectra for jets within 0.2 < |eta| < 0.8.
+    void analyze(const Event& event) {
+      const double weight = event.weight();
+
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.empty()) {
+        getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                 << " because no final state found " << endl;
+        vetoEvent;
+      }
+
+      // Find jets
+      const FastJets& jetpro = applyProjection<FastJets>(event, "MidpointJets");
+      const PseudoJets& jets = jetpro.pseudoJetsByPt();
+
+      // Bug fix: the eta acceptance cut must be applied to *each* jet in
+      // the loop; the old code tested jets[0] (the leading jet) for every
+      // iteration.  Also take each jet by const reference to avoid copies.
+      foreach (const fastjet::PseudoJet& jet, jets) {
+        const double absEta = fabs(jet.eta());
+        if (absEta > 0.2 && absEta < 0.8) {
+          _h_jet_pT_MB->fill(jet.perp(), weight);
+          _h_jet_pT_HT->fill(jet.perp(), weight);
+        }
+      }
+    }
+
+
+    /// Finalize: normalise the spectra to the recorded sample yields.
+    void finalize() {
+      /// @todo Use the generator cross-section
+      //_h_total_cross_section->fill(crossSection());
+      normalize(_h_jet_pT_MB, 16603100);
+      normalize(_h_jet_pT_HT, 1808234);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D * _h_jet_pT_MB;
+    AIDA::IHistogram1D * _h_jet_pT_HT;
+    //@}
+
+  };
+  
+  
+  
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<STAR_2006_S6870392> plugin_STAR_2006_S6870392;
+  
+}

Copied: trunk/src/Analyses/STAR_2008_S7993412.cc (from r1802, trunk/src/Analyses/RHIC/STAR_2008_S7993412.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/STAR_2008_S7993412.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/RHIC/STAR_2008_S7993412.cc)
@@ -0,0 +1,90 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/RivetAIDA.hh"
+
+namespace Rivet {
+
+  /// @brief di-hadron correlations in d-Au at 200 GeV
+  class STAR_2008_S7993412 : public Analysis {
+  public:
+
+    /// Constructor: book the charged final state (|eta| < 1, pT > 1 GeV)
+    /// from which both trigger and associated particles are drawn.
+    STAR_2008_S7993412()
+      : Analysis("STAR_2008_S7993412")
+    {
+      setBeams(PROTON, PROTON);
+      ChargedFinalState fs(-1.0, 1.0, 1.0*GeV);
+      addProjection(fs, "FS");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the trigger and associated yield profiles.
+    void init() {
+      _h_Y_jet_trigger = bookProfile1D(1, 1, 1);
+      _h_Y_jet_associated = bookProfile1D(2, 1, 1);
+    }
+
+
+    /// For every trigger particle (2 <= pT < 5 GeV), count the associated
+    /// particles close in (phi, eta) and fill the trigger-yield profile.
+    void analyze(const Event & event) {
+      const double weight = event.weight();
+
+      // Skip if the event is empty
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      if (fs.empty()) {
+        getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
+                 << " because no final state found " << endl;
+        vetoEvent;
+      }
+
+      foreach (const Particle& trig, fs.particles()) {
+        const double ptTrig = trig.momentum().pT();
+        // Trigger particles lie in 2 <= pT < 5 GeV
+        if (ptTrig < 2.0 || ptTrig >= 5.0) continue;
+        int nAssoc = 0;
+        foreach (const Particle& assoc, fs.particles()) {
+          // Associated particles: softer than the trigger but above
+          // 1.5 GeV, within dphi < 1 and |deta| < 1.75 of it.
+          if (assoc.momentum().pT() <= 1.5) continue;
+          if (assoc.momentum().pT() >= ptTrig) continue;
+          if (deltaPhi(trig.momentum().phi(), assoc.momentum().phi()) >= 1) continue;
+          if (fabs(trig.momentum().pseudorapidity() - assoc.momentum().pseudorapidity()) >= 1.75) continue;
+          nAssoc += 1;
+        }
+        //const double dPhidEta = 2 * 2*1.75;
+        //_h_Y_jet_trigger->fill(ptTrig, nAssoc/dPhidEta, weight);
+        _h_Y_jet_trigger->fill(ptTrig, nAssoc, weight);
+      }
+    }
+
+
+    /// Finalize
+    void finalize() {
+      /// @todo Use the generator cross-section
+      //_h_total_cross_section->fill(crossSection());
+      //normalize(_h_jet_pT_MB, 16603100);
+      //normalize(_h_jet_pT_HT, 1808234);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IProfile1D * _h_Y_jet_trigger;
+    AIDA::IProfile1D * _h_Y_jet_associated;  // NOTE(review): booked but never filled here
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<STAR_2008_S7993412> plugin_STAR_2008_S7993412;
+  
+}

Copied: trunk/src/Analyses/STAR_2009_UE_HELEN.cc (from r1802, trunk/src/Analyses/RHIC/STAR_2009_UE_HELEN.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/STAR_2009_UE_HELEN.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/RHIC/STAR_2009_UE_HELEN.cc)
@@ -0,0 +1,277 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /* STAR underlying event
+   * @author Hendrik Hoeth
+   */
+  class STAR_2009_UE_HELEN : public Analysis {
+  public:
+
+    /// Constructor: book the wide final state used for jet finding
+    /// (|eta| < 4), the sum(ET) final state (|eta| < 1), and the charged
+    /// final state (|eta| < 1, pT > 0.5 GeV) for the UE observables.
+    STAR_2009_UE_HELEN()
+      : Analysis("STAR_2009_UE_HELEN")
+    {
+      setBeams(PROTON, ANTIPROTON);
+
+      // Final state for the jet finding
+      const FinalState fsj(-4.0, 4.0, 0.0*GeV);
+      addProjection(fsj, "FSJ");
+      /// @todo STAR doesn't really use a CDF midpoint algorithm!
+      addProjection(FastJets(fsj, FastJets::CDFMIDPOINT, 0.7), "Jets");
+
+      // Final state for the sum(ET) distributions
+      const FinalState fs(-1.0, 1.0, 0.0*GeV);
+      addProjection(fs, "FS");
+
+      // Charged final state for the distributions
+      const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
+      addProjection(cfs, "CFS");
+    }
+
+
+    // /// @name Publication metadata
+    // //@{
+
+    // /// Analysis name
+    // string name() const {
+    //   return "STAR_2009_UE_HELEN";
+    // }
+    // /// SPIRES key (IRN)
+    // string spiresId() const {
+    //   return "NONE";
+    // }
+    // /// A short description of the analysis.
+    // string summary() const {
+    //   return "CDF Run 2 underlying event in leading jet events";
+    // }
+    // /// Full description of the analysis, for the manual
+    // string description() const {
+    //   ostringstream os;
+    //   os << "";
+    //   return os.str();
+    // }
+    // /// Experiment which performed and published this analysis.
+    // string experiment() const {
+    //  return "STAR";
+    // }
+    // /// Collider on which the experiment was based
+    // string collider() const {
+    //  return "(RHIC pp 200 GeV)";
+    // }
+    // /// When published according to SPIRES
+    // string year() const {
+    //  return "2008";
+    // }
+    // /// Names & emails of paper/analysis authors.
+    // vector<string> authors() const {
+    //   vector<string> ret;
+    //   ret += "Hendrik Hoeth <hendrik.hoeth at cern.ch>";
+    //   return ret;
+    // }
+    // /// Information about the events needed as input for this analysis.
+    // string runInfo() const {
+    //   ostringstream os;
+    //   os << "* pp interactions at 200 GeV";
+    //   return os.str();
+    // }
+
+    // string status() const {
+    //   return "UNVALIDATED";
+    // }
+    // /// No journal or preprint references.
+    // vector<string> references() const {
+    //   vector<string> ret;
+    //   ret += "";
+    //   return ret;
+    // }
+
+    // //@}
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the transverse-region multiplicity and pT-sum profiles.
+    void init() {
+      _hist_pnchg      = bookProfile1D( 1, 1, 1);
+      _hist_pmaxnchg   = bookProfile1D( 2, 1, 1);
+      _hist_pminnchg   = bookProfile1D( 3, 1, 1);
+      _hist_pdifnchg   = bookProfile1D( 4, 1, 1);
+      _hist_pcptsum    = bookProfile1D( 5, 1, 1);
+      _hist_pmaxcptsum = bookProfile1D( 6, 1, 1);
+      _hist_pmincptsum = bookProfile1D( 7, 1, 1);
+      _hist_pdifcptsum = bookProfile1D( 8, 1, 1);
+      _hist_pcptave    = bookProfile1D( 9, 1, 1);
+      //_hist_onchg   = bookProfile1D( 1, 1, 1, "Overall number of charged particles");
+      //_hist_ocptsum = bookProfile1D( 2, 1, 1, "Overall charged $p_\\perp$ sum");
+      //_hist_oetsum  = bookProfile1D( 3, 1, 1, "Overall $E_\\perp$ sum");
+    }
+
+
+    /// Find the leading jet and fill the transverse-region ("underlying
+    /// event") multiplicity and pT-sum profiles against its pT.
+    void analyze(const Event& e) {
+      const FinalState& fsj = applyProjection<FinalState>(e, "FSJ");
+      if (fsj.particles().size() < 1) {
+        getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
+        vetoEvent;
+      }
+
+      const Jets jets = applyProjection<FastJets>(e, "Jets").jetsByPt();
+      getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
+
+      // We require the leading jet to be within |eta|<2
+      if (jets.size() < 1 || fabs(jets[0].momentum().eta()) >= 2) {
+        getLog() << Log::DEBUG << "Failed jet cut" << endl;
+        vetoEvent;
+      }
+
+      // The leading jet defines the event axis
+      const double jetphi = jets[0].momentum().phi();
+      const double jetpT  = jets[0].momentum().pT();
+
+      // Get the event weight
+      const double weight = e.weight();
+
+      // Get the final states to work with for filling the distributions
+      const FinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
+
+      size_t   numOverall(0),     numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0)  ;
+      double ptSumOverall(0.0), ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
+      //double EtSumOverall(0.0), EtSumToward(0.0), EtSumTrans1(0.0), EtSumTrans2(0.0), EtSumAway(0.0);
+      double ptMaxOverall(0.0), ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
+
+      // Calculate all the charged stuff: classify each particle into the
+      // toward (dphi < pi/3), transverse (pi/3..2pi/3, split into two
+      // sides), or away (> 2pi/3) region relative to the leading jet.
+      foreach (const Particle& p, cfs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double pT = p.momentum().pT();
+        const double phi = p.momentum().phi();
+        /// @todo Jet and particle phi should have same ranges this way: check
+        // NOTE(review): rotatedphi can be negative here; the <= PI test below
+        // then always picks the "Trans1" side -- confirm the intended mapping.
+        const double rotatedphi = phi - jetphi;
+
+        ptSumOverall += pT;
+        ++numOverall;
+        if (pT > ptMaxOverall)
+          ptMaxOverall = pT;
+
+        if (dPhi < PI/3.0) {
+          ptSumToward += pT;
+          ++numToward;
+          if (pT > ptMaxToward)
+            ptMaxToward = pT;
+        }
+        else if (dPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            ptSumTrans1 += pT;
+            ++numTrans1;
+            if (pT > ptMaxTrans1)
+              ptMaxTrans1 = pT;
+          }
+          else {
+            ptSumTrans2 += pT;
+            ++numTrans2;
+            if (pT > ptMaxTrans2)
+              ptMaxTrans2 = pT;
+          }
+        }
+        else {
+          ptSumAway += pT;
+          ++numAway;
+          if (pT > ptMaxAway)
+            ptMaxAway = pT;
+        }
+      } // end charged particle loop
+
+
+      #if 0
+      /// @todo Enable this part when we have the numbers from Rick Field
+
+      // And now the same business for all particles (including neutrals)
+      foreach (const Particle& p, fs.particles()) {
+        const double dPhi = deltaPhi(p.momentum().phi(), jetphi);
+        const double ET = p.momentum().Et();
+        const double phi = p.momentum().phi();
+        const double rotatedphi = phi - jetphi;
+
+        EtSumOverall += ET;
+
+        if (dPhi < PI/3.0) {
+          EtSumToward += ET;
+        }
+        else if (dPhi < 2*PI/3.0) {
+          if (rotatedphi <= PI) {
+            EtSumTrans1 += ET;
+          }
+          else {
+            EtSumTrans2 += ET;
+          }
+        }
+        else {
+          EtSumAway += ET;
+        }
+      } // end all particle loop
+      #endif
+
+
+      // Fill the histograms (densities per unit dphi*deta)
+      //_hist_tnchg->fill(jetpT, numToward/(4*PI/3), weight);
+      _hist_pnchg->fill(jetpT, (numTrans1+numTrans2)/(4*PI/3), weight);
+      _hist_pmaxnchg->fill(jetpT, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      _hist_pminnchg->fill(jetpT, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
+      // numTrans1/2 are unsigned (size_t): abs(numTrans1-numTrans2) would
+      // underflow to a huge value whenever numTrans2 > numTrans1, so take
+      // the positive difference explicitly.
+      const size_t numTransDiff = (numTrans1 > numTrans2) ? (numTrans1 - numTrans2) : (numTrans2 - numTrans1);
+      _hist_pdifnchg->fill(jetpT, numTransDiff/(2*PI/3), weight);
+      //_hist_anchg->fill(jetpT, numAway/(4*PI/3), weight);
+
+      //_hist_tcptsum->fill(jetpT, ptSumToward/(4*PI/3), weight);
+      _hist_pcptsum->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
+      _hist_pmaxcptsum->fill(jetpT, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pmincptsum->fill(jetpT, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
+      _hist_pdifcptsum->fill(jetpT, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
+      //_hist_acptsum->fill(jetpT, ptSumAway/(4*PI/3), weight);
+
+      //if (numToward > 0) {
+      //  _hist_tcptave->fill(jetpT, ptSumToward/numToward, weight);
+      //  _hist_tcptmax->fill(jetpT, ptMaxToward, weight);
+      //}
+      if ((numTrans1+numTrans2) > 0) {
+        _hist_pcptave->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
+        //_hist_pcptmax->fill(jetpT, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
+      }
+      //if (numAway > 0) {
+      //  _hist_acptave->fill(jetpT, ptSumAway/numAway, weight);
+      //  _hist_acptmax->fill(jetpT, ptMaxAway, weight);
+      //}
+    }
+
+
+    /// Finalize (profiles need no scaling)
+    void finalize() {
+      //
+    }
+
+    //@}
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IProfile1D *_hist_pnchg;
+    AIDA::IProfile1D *_hist_pmaxnchg;
+    AIDA::IProfile1D *_hist_pminnchg;
+    AIDA::IProfile1D *_hist_pdifnchg;
+    AIDA::IProfile1D *_hist_pcptsum;
+    AIDA::IProfile1D *_hist_pmaxcptsum;
+    AIDA::IProfile1D *_hist_pmincptsum;
+    AIDA::IProfile1D *_hist_pdifcptsum;
+    AIDA::IProfile1D *_hist_pcptave;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<STAR_2009_UE_HELEN> plugin_STAR_2009_UE_HELEN;
+  
+}

Copied: trunk/src/Analyses/UA1_1990_S2044935.cc (from r1802, trunk/src/Analyses/SPS/UA1_1990_S2044935.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/UA1_1990_S2044935.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/SPS/UA1_1990_S2044935.cc)
@@ -0,0 +1,171 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/FinalState.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/PVertex.hh"
+#include "Rivet/Projections/TotalVisibleMomentum.hh"
+
+namespace Rivet {
+
+
+  /// UA1 charged multiplicity, pT and ET distributions at SppS energies.
+  class UA1_1990_S2044935 : public Analysis {
+  public:
+
+    /// Default constructor: book the final-state, beam and
+    /// visible-momentum projections used in analyze().
+    UA1_1990_S2044935()
+      : Analysis("UA1_1990_S2044935")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      const FinalState fs2(-6., 6.);
+      const FinalState fs(-2.5,2.5);
+      addProjection(fs, "FS");
+      addProjection(fs2, "FS2");
+      addProjection(ChargedFinalState(-2.5, 2.5), "CFS");
+      addProjection(Beam(), "Beam");
+      addProjection(TotalVisibleMomentum(fs), "Mom");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms for each beam energy (63/200/500/900 GeV).
+    void init() {
+      _hist_sigma200 = bookHistogram1D(1,1,1);
+      _hist_sigma500 = bookHistogram1D(1,1,2);
+      _hist_sigma900 = bookHistogram1D(1,1,3);
+      _hist_Esigma200 = bookHistogram1D(2,1,1);
+      _hist_Esigma500 = bookHistogram1D(2,1,2);
+      _hist_Esigma900 = bookHistogram1D(2,1,3);
+      _hist_Esigmapoint8 = bookHistogram1D(3,1,1);
+      _hist_Esigma4 = bookHistogram1D(4,1,1);
+      _hist_Esigma8 = bookHistogram1D(5,1,1);
+      _hist_Et200 = bookHistogram1D(9,1,1);
+      _hist_Et500 = bookHistogram1D(10,1,1);
+      _hist_Et900 = bookHistogram1D(11,1,1);
+      _hist_Pt63 = bookProfile1D(8,1,1);
+      _hist_Pt200 = bookProfile1D(6,1,1);
+      _hist_Pt900 = bookProfile1D(7,1,1);
+      _hist_Etavg200 = bookProfile1D(12,1,1);
+      _hist_Etavg500 = bookProfile1D(12,1,2);
+      _hist_Etavg900 = bookProfile1D(12,1,3);
+    }
+
+
+    /// Fill multiplicity, pT and ET observables, binned by collision
+    /// energy. All energy comparisons consistently use sqrtS/GeV (the
+    /// original mixed sqrtS and sqrtS/GeV).
+    void analyze(const Event& event) {
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
+      const double weight = event.weight();
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFS");
+      const FinalState& fs = applyProjection<FinalState>(event, "FS");
+      double multi = cfs.particles().size();
+
+      // Charged multiplicity distributions
+      if (fuzzyEquals(sqrtS/GeV, 200, 1E-4)) {
+        _hist_sigma200->fill(multi, weight);
+      } else if (fuzzyEquals(sqrtS/GeV, 500)) {
+        _hist_sigma500->fill(multi, weight);
+      } else if (fuzzyEquals(sqrtS/GeV, 900)) {
+        _hist_sigma900->fill(multi, weight);
+      }
+
+      // Invariant pT spectra (and dNch/deta-binned spectra at 900 GeV)
+      foreach (const Particle& p, fs.particles()) {
+        /// @todo Figure out where the extra factor of 0.5 comes from in the weight factor: eta range?
+        double pt = p.momentum().pT();
+        if (fuzzyEquals(sqrtS/GeV, 200, 1E-4)) {
+          _hist_Esigma200->fill(pt, weight/(2.*10.*M_PI*pt));
+        }
+        if (fuzzyEquals(sqrtS/GeV, 500)) {
+          _hist_Esigma500->fill(pt, weight/(2.*10.*M_PI*pt));
+        }
+        if (fuzzyEquals(sqrtS/GeV, 900)) {
+          _hist_Esigma900->fill(pt, weight/(2.*10.*M_PI*pt));
+          // dNch/deta estimated from the |eta| < 2.5 multiplicity
+          const double dnch_deta = multi/5.0;
+          if (inRange(dnch_deta, 0.8, 4)) {
+            _hist_Esigmapoint8->fill(pt, weight/(10.*M_PI*pt));
+          } else if (dnch_deta > 4 && dnch_deta <= 8) {
+            _hist_Esigma4->fill(pt, weight/(10.*M_PI*pt));
+          } else if (dnch_deta > 8) {
+            _hist_Esigma8->fill(pt, weight/(10.*M_PI*pt));
+          }
+        }
+      }
+
+      // Scalar sum(ET) distributions
+      const double Et = applyProjection<TotalVisibleMomentum>(event, "Mom").scalarET();
+      if (fuzzyEquals(sqrtS/GeV, 200, 1E-4)) {
+        _hist_Et200->fill(Et, weight);
+      } else if (fuzzyEquals(sqrtS/GeV, 500)) {
+        _hist_Et500->fill(Et, weight);
+      } else if (fuzzyEquals(sqrtS/GeV, 900)) {
+        _hist_Et900->fill(Et, weight);
+      }
+
+      // <pT> and <ET> vs. charged multiplicity
+      foreach (const Particle& p, cfs.particles()) {
+        if (fuzzyEquals(sqrtS/GeV, 63, 1E-3)) {
+          _hist_Pt63->fill(multi, p.momentum().pT(), weight);
+        } else if (fuzzyEquals(sqrtS/GeV, 200, 1E-4)) {
+          _hist_Pt200->fill(multi, p.momentum().pT(), weight);
+          _hist_Etavg200->fill(multi, Et, weight);
+        } else if (fuzzyEquals(sqrtS/GeV, 500)) {
+          _hist_Etavg500->fill(multi, Et, weight);
+        } else if (fuzzyEquals(sqrtS/GeV, 900)) {
+          _hist_Pt900->fill(multi, p.momentum().pT(), weight);
+          _hist_Etavg900->fill(multi, Et, weight);
+        }
+      }
+    }
+
+
+    /// Normalise/scale to the measured cross-sections (in mb).
+    void finalize() {
+      ///@todo: get the total cross-sections from the generator
+      ///@todo: check if the scaling for Esigmpoint8, Esigma4 and Esigma8 are correct.
+      normalize(_hist_sigma200, 27.9);
+      normalize(_hist_sigma500, 31.5);
+      normalize(_hist_sigma900, 34.4);
+      scale(_hist_Esigma200, 27.9/sumOfWeights());
+      scale(_hist_Esigma500, 31.5/sumOfWeights());
+      scale(_hist_Esigma900, 34.4/sumOfWeights());
+      scale(_hist_Esigmapoint8, 34400./sumOfWeights());
+      scale(_hist_Esigma4, 3440./sumOfWeights());
+      scale(_hist_Esigma8, 344./sumOfWeights());
+      normalize(_hist_Et200, 27.9);
+      normalize(_hist_Et500, 31.5);
+      normalize(_hist_Et900, 34.4);
+    }
+
+    //@}
+
+
+  private:
+
+    /// @name Histogram collections
+    //@{
+    AIDA::IHistogram1D* _hist_sigma200;
+    AIDA::IHistogram1D* _hist_sigma500;
+    AIDA::IHistogram1D* _hist_sigma900;
+    AIDA::IHistogram1D* _hist_Esigma200;
+    AIDA::IHistogram1D* _hist_Esigma500;
+    AIDA::IHistogram1D* _hist_Esigma900;
+    AIDA::IHistogram1D* _hist_Esigmapoint8;
+    AIDA::IHistogram1D* _hist_Esigma4;
+    AIDA::IHistogram1D* _hist_Esigma8;
+    AIDA::IProfile1D* _hist_Pt63;
+    AIDA::IProfile1D* _hist_Pt200;
+    AIDA::IProfile1D* _hist_Pt900;
+    AIDA::IProfile1D* _hist_Etavg200;
+    AIDA::IProfile1D* _hist_Etavg500;
+    AIDA::IProfile1D* _hist_Etavg900;
+    AIDA::IHistogram1D* _hist_Et200;
+    AIDA::IHistogram1D* _hist_Et500;
+    AIDA::IHistogram1D* _hist_Et900;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<UA1_1990_S2044935> plugin_UA1_1990_S2044935;
+  
+}

Copied: trunk/src/Analyses/UA5_1982_S875503.cc (from r1802, trunk/src/Analyses/SPS/UA5_1982_S875503.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/UA5_1982_S875503.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/SPS/UA5_1982_S875503.cc)
@@ -0,0 +1,108 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/ParticleIDMethods.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+  /// UA5 charged multiplicity and pseudorapidity distributions in pp/ppbar.
+  class UA5_1982_S875503 : public Analysis {
+  public:
+
+    /// Default constructor: book the beam and charged final state
+    /// (|eta| < 3.5) projections.
+    UA5_1982_S875503()
+      : Analysis("UA5_1982_S875503")
+    {
+      const ChargedFinalState cfs(-3.5, 3.5);
+      addProjection(Beam(), "Beam");
+      addProjection(cfs, "CFS");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the mean-multiplicity and eta-distribution histograms.
+    void init()
+    {
+      _hist_nch_pp    = bookHistogram1D(2,1,1);
+      _hist_nch_ppbar = bookHistogram1D(2,1,2);
+      _hist_eta_pp    = bookHistogram1D(3,1,1);
+      _hist_eta_ppbar = bookHistogram1D(4,1,1);
+    }
+
+
+    /// Apply the hodoscope-style trigger and fill the eta and mean
+    /// multiplicity histograms, split by pp vs. ppbar beams.
+    void analyze(const Event& event) {
+      const Beam b = applyProjection<Beam>(event, "Beam");
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFS");
+      const double weight = event.weight();
+
+      // Different trigger implementations for ppbar and pp!
+      // NOTE(review): the CFS only covers |eta| < 3.5, so these trigger
+      // windows are effectively truncated at 3.5 -- confirm intended.
+      int n_trig_1(0), n_trig_2(0);
+      foreach (const Particle& p, cfs.particles()) {
+        double eta = p.momentum().pseudorapidity();
+        if (inRange(eta, -5.6, -2.0)) n_trig_1 += 1;
+        else if (inRange(eta, 2.0, 5.6)) n_trig_2 += 1;
+      }
+
+      // Trigger requirements
+      const bool samebeam = (b.beams().first.pdgId() == b.beams().second.pdgId());
+      if (samebeam) {
+        // PP
+        if (n_trig_1 == 0 || n_trig_2 == 0) vetoEvent;
+      } else {
+        // PPbar
+        /// @todo Is this actually the exact trigger requirement?
+        if (n_trig_1 * n_trig_2 < 4) vetoEvent;
+      }
+
+      // Iterate over all FS particles and fill histograms
+      foreach (const Particle& p, cfs.particles()) {
+        if (samebeam) {
+          // PP collision
+          _hist_eta_pp->fill(fabs(p.momentum().pseudorapidity()), weight);
+        } else {
+          // PPbar collision
+          _hist_eta_ppbar->fill(fabs(p.momentum().pseudorapidity()), weight);
+        }
+      }
+
+      // Fill mean charged multiplicity histos. The multiplicity enters as
+      // the fill weight of the single bin and must be multiplied by the
+      // event weight so that the 1/sumOfWeights() scaling in finalize()
+      // yields the weighted mean multiplicity (the event weight was
+      // missing before, which is wrong for weighted generator samples).
+      if (samebeam) {
+        // PP
+        _hist_nch_pp->fill(_hist_nch_pp->binMean(0), weight*cfs.particles().size());
+      } else {
+        // PPbar
+        _hist_nch_ppbar->fill(_hist_nch_ppbar->binMean(0), weight*cfs.particles().size());
+      }
+
+    }
+
+
+    /// Scale the mean multiplicities and normalise the eta distributions.
+    void finalize() {
+      scale(_hist_nch_pp,    1./sumOfWeights());
+      scale(_hist_nch_ppbar, 1./sumOfWeights());
+      normalize(_hist_eta_pp,    5.28);
+      normalize(_hist_eta_ppbar, 5.29);
+    }
+    //@}
+
+
+  private:
+
+    /// @name Histogram collections
+    //@{
+    AIDA::IHistogram1D* _hist_nch_pp;
+    AIDA::IHistogram1D* _hist_nch_ppbar;
+    AIDA::IHistogram1D* _hist_eta_pp;
+    AIDA::IHistogram1D* _hist_eta_ppbar;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<UA5_1982_S875503> plugin_UA5_1982_S875503;
+  
+}

Copied: trunk/src/Analyses/UA5_1986_S1583476.cc (from r1802, trunk/src/Analyses/SPS/UA5_1986_S1583476.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/UA5_1986_S1583476.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/SPS/UA5_1986_S1583476.cc)
@@ -0,0 +1,188 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/Beam.hh"
+
+namespace Rivet {
+
+  /// UA5 pseudorapidity distributions at 200 and 900 GeV, inclusive and
+  /// in bins of charged multiplicity.
+  class UA5_1986_S1583476 : public Analysis {
+  public:
+
+    /// Constructor: book the beam projection, the unrestricted charged
+    /// final state (trigger), and the |eta| < 5 charged final state.
+    UA5_1986_S1583476()
+      : Analysis("UA5_1986_S1583476")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      addProjection(Beam(), "Beams");
+
+      // All charged final state particles, needed for triggers
+      const ChargedFinalState cfs;
+      addProjection(cfs, "CFSAll");
+      // Charged particles in |eta| < 5.0
+      const ChargedFinalState cfs50(-5.0, 5.0);
+      addProjection(cfs50, "CFS50");
+    }
+
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book the inclusive and multiplicity-binned eta histograms.
+    void init() {
+      _hist_eta_nsd_200      = bookHistogram1D(1,1,1);
+      _hist_eta_inelastic_200  = bookHistogram1D(1,1,2);
+      _hist_eta_nsd_900      = bookHistogram1D(1,1,3);
+      _hist_eta_inelastic_900  = bookHistogram1D(1,1,4);
+
+      _hist_eta_nsd_n_2_200  = bookHistogram1D(2,1,1);
+      _hist_eta_nsd_n_12_200 = bookHistogram1D(2,1,2);
+      _hist_eta_nsd_n_22_200 = bookHistogram1D(2,1,3);
+      _hist_eta_nsd_n_32_200 = bookHistogram1D(2,1,4);
+      _hist_eta_nsd_n_42_200 = bookHistogram1D(2,1,5);
+      _hist_eta_nsd_n_52_200 = bookHistogram1D(2,1,6);
+
+      _hist_eta_nsd_n_2_900  = bookHistogram1D(3,1,1);
+      _hist_eta_nsd_n_12_900 = bookHistogram1D(3,1,2);
+      _hist_eta_nsd_n_22_900 = bookHistogram1D(3,1,3);
+      _hist_eta_nsd_n_32_900 = bookHistogram1D(3,1,4);
+      _hist_eta_nsd_n_42_900 = bookHistogram1D(3,1,5);
+      _hist_eta_nsd_n_52_900 = bookHistogram1D(3,1,6);
+      _hist_eta_nsd_n_62_900 = bookHistogram1D(3,1,7);
+      _hist_eta_nsd_n_72_900 = bookHistogram1D(3,1,8);
+      _hist_eta_nsd_n_82_900 = bookHistogram1D(3,1,9);
+    }
+
+
+    /// Apply the hodoscope trigger, classify the event as NSD or not,
+    /// and fill the |eta| histograms from particles with |eta| < 5.
+    void analyze(const Event& event) {
+      const double sqrtS = applyProjection<Beam>(event, "Beams").sqrtS();
+      const double weight = event.weight();
+
+      // Trigger requirements from the hodoscopes (1 arm (elastic) and 2 arms (NSD))
+      int n_trig_1(0), n_trig_2(0);
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFSAll");
+      foreach (const Particle& p, cfs.particles()) {
+        double eta = p.momentum().pseudorapidity();
+        if (inRange(eta, -5.6, -2.0)) n_trig_1 += 1;
+        else if (inRange(eta, 2.0, 5.6)) n_trig_2 += 1;
+      }
+      getLog() << Log::DEBUG << "Trigger -: " << n_trig_1 << ", Trigger +: " << n_trig_2 << endl;
+
+      // Require at least one hit in the trigger hodoscopes.
+      // (The statement-terminating ';' after vetoEvent was missing.)
+      if (n_trig_1 == 0 && n_trig_2 == 0) vetoEvent;
+      // A coincidence hit in both hodoscope arms tags the event as NSD
+      bool isNSD = true;
+      if (n_trig_1 == 0 || n_trig_2 == 0) isNSD = false;
+
+      // Declare final state for |eta| < 5.0
+      const ChargedFinalState& cfs50 = applyProjection<ChargedFinalState>(event, "CFS50");
+      int numP = cfs50.particles().size();
+
+      // Iterate over particles in |eta| < 5.0 and fill histos with |eta|.
+      // (The original looped over the unrestricted CFS, which is
+      // inconsistent with numP and with the |eta| < 5 comment above.)
+      foreach (const Particle& p, cfs50.particles()) {
+        double eta = fabs(p.momentum().pseudorapidity());
+
+        // Fill 200 GeV histos
+        if (fuzzyEquals(sqrtS, 200.0, 1E-4)) {
+          // Fill histos that don't require a certain multiplicity
+          _hist_eta_inelastic_200->fill(eta, weight);
+          if ( isNSD ) {
+            // Fill histos that require a certain multiplicity
+            _hist_eta_nsd_200->fill(eta, weight);
+            if ( ( 2 <= numP ) && ( numP <= 10 ) ) _hist_eta_nsd_n_2_200->fill(eta, weight);
+            else if ( ( 12 <= numP ) && ( numP <= 20 ) ) _hist_eta_nsd_n_12_200->fill(eta, weight);
+            else if ( ( 22 <= numP ) && ( numP <= 30 ) ) _hist_eta_nsd_n_22_200->fill(eta, weight);
+            else if ( ( 32 <= numP ) && ( numP <= 40 ) ) _hist_eta_nsd_n_32_200->fill(eta, weight);
+            else if ( ( 42 <= numP ) && ( numP <= 50 ) ) _hist_eta_nsd_n_42_200->fill(eta, weight);
+            else if ( numP >= 52 ) _hist_eta_nsd_n_52_200->fill(eta, weight);
+          }
+        }
+
+        // Fill 900 GeV histos
+        else if (fuzzyEquals(sqrtS, 900.0, 1E-4)) {
+          // Fill histos that don't require a certain multiplicity
+          _hist_eta_inelastic_900->fill(eta, weight);
+          if ( isNSD ) {
+            // Fill histos that require a certain multiplicity
+            _hist_eta_nsd_900->fill(eta, weight);
+            if ( ( 2 <= numP ) && ( numP <= 10 ) ) _hist_eta_nsd_n_2_900->fill(eta, weight);
+            else if ( ( 12 <= numP ) && ( numP <= 20 ) ) _hist_eta_nsd_n_12_900->fill(eta, weight);
+            else if ( ( 22 <= numP ) && ( numP <= 30 ) ) _hist_eta_nsd_n_22_900->fill(eta, weight);
+            else if ( ( 32 <= numP ) && ( numP <= 40 ) ) _hist_eta_nsd_n_32_900->fill(eta, weight);
+            else if ( ( 42 <= numP ) && ( numP <= 50 ) ) _hist_eta_nsd_n_42_900->fill(eta, weight);
+            else if ( ( 52 <= numP ) && ( numP <= 60 ) ) _hist_eta_nsd_n_52_900->fill(eta, weight);
+            else if ( ( 62 <= numP ) && ( numP <= 70 ) ) _hist_eta_nsd_n_62_900->fill(eta, weight);
+            else if ( ( 72 <= numP ) && ( numP <= 80 ) ) _hist_eta_nsd_n_72_900->fill(eta, weight);
+            else if ( numP >= 82 ) _hist_eta_nsd_n_82_900->fill(eta, weight);
+          }
+        }
+      }
+    }
+
+
+    /// Scale histos to the area of the corresponding reference histos.
+    void finalize() {
+      normalize(_hist_eta_nsd_200, 10.2225);
+      normalize(_hist_eta_inelastic_200, 9.255);
+      normalize(_hist_eta_nsd_900, 15.285);
+      normalize(_hist_eta_inelastic_900, 13.9725);
+
+      normalize(_hist_eta_nsd_n_2_200, 3.285);
+      normalize(_hist_eta_nsd_n_12_200, 7.34);
+      normalize(_hist_eta_nsd_n_22_200, 12.02);
+      normalize(_hist_eta_nsd_n_32_200, 17.2);
+      normalize(_hist_eta_nsd_n_42_200, 21.99);
+      normalize(_hist_eta_nsd_n_52_200, 27.8);
+
+      normalize(_hist_eta_nsd_n_2_900, 2.7);
+      normalize(_hist_eta_nsd_n_12_900, 6.425);
+      normalize(_hist_eta_nsd_n_22_900, 10.54);
+      normalize(_hist_eta_nsd_n_32_900, 15.225);
+      normalize(_hist_eta_nsd_n_42_900, 19.885);
+      normalize(_hist_eta_nsd_n_52_900, 25.13);
+      normalize(_hist_eta_nsd_n_62_900, 29.235);
+      normalize(_hist_eta_nsd_n_72_900, 33.81);
+      normalize(_hist_eta_nsd_n_82_900, 41.75);
+    }
+
+
+  private:
+
+    /// @name Histograms
+    //@{
+    // Histos of Figure 1 (HepData Table 1)
+    AIDA::IHistogram1D *_hist_eta_nsd_200;
+    AIDA::IHistogram1D *_hist_eta_inelastic_200;
+    AIDA::IHistogram1D *_hist_eta_nsd_900;
+    AIDA::IHistogram1D *_hist_eta_inelastic_900;
+
+    // Histos of Figure 3a (HepData Table 2)
+    AIDA::IHistogram1D *_hist_eta_nsd_n_2_200;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_12_200;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_22_200;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_32_200;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_42_200;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_52_200;
+
+    // Histos of Figure 3b (HepData Table 3)
+    AIDA::IHistogram1D *_hist_eta_nsd_n_2_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_12_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_22_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_32_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_42_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_52_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_62_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_72_900;
+    AIDA::IHistogram1D *_hist_eta_nsd_n_82_900;
+    //@}
+
+  };
+
+
+
+  // This global object acts as a hook for the plugin system
+  AnalysisBuilder<UA5_1986_S1583476> plugin_UA5_1986_S1583476;
+  
+}

Copied: trunk/src/Analyses/UA5_1988_S1867512.cc (from r1802, trunk/src/Analyses/SPS/UA5_1988_S1867512.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/UA5_1988_S1867512.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/SPS/UA5_1988_S1867512.cc)
@@ -0,0 +1,399 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/Beam.hh"
+
+namespace Rivet {
+
+  /// @todo Move these into the MathUtils header.
+
+  // A simple function to calculate the mean of a sample
+  // A simple function to calculate the mean of a sample.
+  // Returns 0.0 for an empty sample rather than dividing by zero.
+  double mean(const vector<int>& sample) {
+    if (sample.empty()) return 0.0;
+    double mean = 0.0;
+    foreach (const int& i, sample) {
+      mean += i;
+    }
+    return mean/sample.size();
+  }
+  
+  
+  // Calculate the covariance (variance) between two samples  
+  // Calculate the covariance (variance) between two samples, using the
+  // unbiased N-1 normalisation. Returns 0.0 when fewer than two entries.
+  double covariance(const vector<int>& sample1, const vector<int>& sample2) {
+    const double mean1 = mean(sample1);
+    const double mean2 = mean(sample2);
+    const int N = sample1.size();
+    double sum = 0.0;
+    for (int i = 0; i < N; ++i) {
+      sum += (sample1[i] - mean1)*(sample2[i] - mean2);
+    }
+    return (N > 1) ? sum/(N-1) : 0.0;
+  }
+  
+  
+  // Calculate the correlation strength between two samples
+  // Calculate the correlation strength between two samples.
+  // NOTE: this is not the Pearson coefficient. The Pearson coefficient
+  // cov/sqrt(var1*var2) is rescaled by sqrt(var2/var1), so the returned
+  // value reduces algebraically to cov/var1 — i.e. the slope of the
+  // linear regression of sample2 on sample1 ("correlation strength" b).
+  double correlation(const vector<int>& sample1, const vector<int>& sample2) {
+    const double cov = covariance(sample1, sample2);
+    const double var1 = covariance(sample1, sample1);
+    const double var2 = covariance(sample2, sample2);
+    const double correlation = cov/sqrt(var1*var2);
+    const double corr_strength = correlation*sqrt(var2/var1);
+    return corr_strength;
+  }
+
+}
+
+
+namespace Rivet {
+  
+
+  /// @brief UA5 forward-backward charged-multiplicity correlations
+  ///
+  /// Collects per-event charged multiplicities in several symmetric and
+  /// asymmetric pseudorapidity windows at sqrt(s) = 200, 546 and 900 GeV,
+  /// and computes the forward-backward correlation strengths in finalize().
+  class UA5_1988_S1867512 : public Analysis {
+  public:
+
+    /// Constructor: book the Beam projection, a full charged final state
+    /// for the trigger emulation, and one ChargedFinalState per eta window.
+    UA5_1988_S1867512()
+      : Analysis("UA5_1988_S1867512")
+    {
+      setBeams(PROTON, ANTIPROTON);
+      addProjection(Beam(), "Beams");
+      
+      // All charged final state particles, needed for trigger implementation only
+      const ChargedFinalState cfs;
+      addProjection(cfs,   "CFSAll");
+      
+      // Symmetric intervals first
+      // Maybe its possible to define symmetric eta intervals with gaps
+      // Forward eta intervals
+      const ChargedFinalState cfs10f(0.0, 1.0);
+      const ChargedFinalState cfs15f(0.5, 1.5);
+      const ChargedFinalState cfs20f(1.0, 2.0);
+      const ChargedFinalState cfs25f(1.5, 2.5);
+      const ChargedFinalState cfs30f(2.0, 3.0);
+      const ChargedFinalState cfs35f(2.5, 3.5);
+      const ChargedFinalState cfs40f(3.0, 4.0);
+      
+      // Backward eta intervals
+      const ChargedFinalState cfs10b(-1.0,  0.0);
+      const ChargedFinalState cfs15b(-1.5, -0.5);
+      const ChargedFinalState cfs20b(-2.0, -1.0);
+      const ChargedFinalState cfs25b(-2.5, -1.5);
+      const ChargedFinalState cfs30b(-3.0, -2.0);
+      const ChargedFinalState cfs35b(-3.5, -2.5);
+      const ChargedFinalState cfs40b(-4.0, -3.0);
+      
+      // Symmetric eta interval
+      const ChargedFinalState cfs05(-0.5,  0.5);
+      
+      addProjection(cfs10f, "CFS10F");
+      addProjection(cfs15f, "CFS15F");
+      addProjection(cfs20f, "CFS20F");
+      addProjection(cfs25f, "CFS25F");
+      addProjection(cfs30f, "CFS30F");
+      addProjection(cfs35f, "CFS35F");
+      addProjection(cfs40f, "CFS40F");
+      
+      addProjection(cfs10b, "CFS10B");
+      addProjection(cfs15b, "CFS15B");
+      addProjection(cfs20b, "CFS20B");
+      addProjection(cfs25b, "CFS25B");
+      addProjection(cfs30b, "CFS30B");
+      addProjection(cfs35b, "CFS35B");
+      addProjection(cfs40b, "CFS40B");
+      
+      addProjection(cfs05, "CFS05");
+    }
+    
+    
+    /// @name Analysis methods
+    //@{
+
+    /// Book one single-bin histogram per eta window and beam energy;
+    /// (d, x, y) booking codes refer to this paper's HepData record.
+    void init() {
+      // Histogram booking, we have sqrt(s) = 200, 546 and 900 GeV
+      _hist_correl_10_200 = bookHistogram1D(1, 1, 1);
+      _hist_correl_10_546 = bookHistogram1D(1, 1, 2);
+      _hist_correl_10_900 = bookHistogram1D(1, 1, 3);
+      
+      _hist_correl_15_200 = bookHistogram1D(2, 1, 1);
+      _hist_correl_15_546 = bookHistogram1D(2, 1, 2);
+      _hist_correl_15_900 = bookHistogram1D(2, 1, 3);
+      
+      _hist_correl_20_200 = bookHistogram1D(3, 1, 1);
+      _hist_correl_20_546 = bookHistogram1D(3, 1, 2);
+      _hist_correl_20_900 = bookHistogram1D(3, 1, 3);
+      
+      _hist_correl_25_200 = bookHistogram1D(4, 1, 1);
+      _hist_correl_25_546 = bookHistogram1D(4, 1, 2);
+      _hist_correl_25_900 = bookHistogram1D(4, 1, 3);
+      
+      _hist_correl_30_200 = bookHistogram1D(5, 1, 1);
+      _hist_correl_30_546 = bookHistogram1D(5, 1, 2);
+      _hist_correl_30_900 = bookHistogram1D(5, 1, 3);
+      
+      _hist_correl_35_200 = bookHistogram1D(6, 1, 1);
+      _hist_correl_35_546 = bookHistogram1D(6, 1, 2);
+      _hist_correl_35_900 = bookHistogram1D(6, 1, 3);
+      
+      _hist_correl_40_200 = bookHistogram1D(7, 1, 1);
+      _hist_correl_40_546 = bookHistogram1D(7, 1, 2);
+      _hist_correl_40_900 = bookHistogram1D(7, 1, 3);
+      
+      _hist_correl_asym_15_200 = bookHistogram1D(8, 1, 1);
+      _hist_correl_asym_15_546 = bookHistogram1D(8, 1, 2);
+      _hist_correl_asym_15_900 = bookHistogram1D(8, 1, 3);
+      
+      _hist_correl_asym_20_200 = bookHistogram1D(9, 1, 1);
+      _hist_correl_asym_20_546 = bookHistogram1D(9, 1, 2);
+      _hist_correl_asym_20_900 = bookHistogram1D(9, 1, 3);
+      
+      _hist_correl_asym_25_200 = bookHistogram1D(10, 1, 1);
+      _hist_correl_asym_25_546 = bookHistogram1D(10, 1, 2);
+      _hist_correl_asym_25_900 = bookHistogram1D(10, 1, 3);
+      
+      _hist_correl_asym_30_200 = bookHistogram1D(11, 1, 1);
+      _hist_correl_asym_30_546 = bookHistogram1D(11, 1, 2);
+      _hist_correl_asym_30_900 = bookHistogram1D(11, 1, 3);
+    }
+    
+    
+
+    /// Apply the hodoscope trigger, record the per-event multiplicity in
+    /// each eta window, and put one (dummy) entry in the histograms for
+    /// the current beam energy so they can be normalised in finalize().
+    void analyze(const Event& event) {
+      const double sqrtS = applyProjection<Beam>(event, "Beams").sqrtS();
+      const double weight = event.weight();
+      
+      // Minimum Bias trigger requirements from the hodoscopes
+      int n_trig_1 = 0;
+      int n_trig_2 = 0;
+      
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFSAll");
+      foreach (const Particle& p, cfs.particles()) {
+        double eta = p.momentum().pseudorapidity();
+        if (inRange(eta, -5.6, -2.0)) n_trig_1++;
+        else if (inRange(eta, 2.0, 5.6)) n_trig_2++;
+      }
+      
+      // Require at least one coincidence hit in trigger hodoscopes
+      if (n_trig_1 == 0 || n_trig_2 == 0) vetoEvent;
+      getLog() << Log::DEBUG << "Trigger 1: " << n_trig_1 << " Trigger 2: " << n_trig_2 << endl;
+      
+      // Record forward/backward multiplicities for this event.
+      // NB. these must be appended (push_back), not summed: the
+      // correlation functions in finalize() need per-event samples,
+      // and std::vector has no operator+= anyway.
+      n_10f.push_back(applyProjection<ChargedFinalState>(event, "CFS10F").size());
+      n_15f.push_back(applyProjection<ChargedFinalState>(event, "CFS15F").size());
+      n_20f.push_back(applyProjection<ChargedFinalState>(event, "CFS20F").size());
+      n_25f.push_back(applyProjection<ChargedFinalState>(event, "CFS25F").size());
+      n_30f.push_back(applyProjection<ChargedFinalState>(event, "CFS30F").size());
+      n_35f.push_back(applyProjection<ChargedFinalState>(event, "CFS35F").size());
+      n_40f.push_back(applyProjection<ChargedFinalState>(event, "CFS40F").size());
+      //
+      n_10b.push_back(applyProjection<ChargedFinalState>(event, "CFS10B").size());
+      n_15b.push_back(applyProjection<ChargedFinalState>(event, "CFS15B").size());
+      n_20b.push_back(applyProjection<ChargedFinalState>(event, "CFS20B").size());
+      n_25b.push_back(applyProjection<ChargedFinalState>(event, "CFS25B").size());
+      n_30b.push_back(applyProjection<ChargedFinalState>(event, "CFS30B").size());
+      n_35b.push_back(applyProjection<ChargedFinalState>(event, "CFS35B").size());
+      n_40b.push_back(applyProjection<ChargedFinalState>(event, "CFS40B").size());
+      //
+      n_05.push_back(applyProjection<ChargedFinalState>(event, "CFS05").size());
+      
+      // Dummy fill: each single-bin histogram just accumulates the event
+      // weight; the bin content is replaced by the correlation value via
+      // normalize() in finalize().
+      if (fuzzyEquals(sqrtS, 200.0, 1E-4)) {
+        _hist_correl_10_200->fill(_hist_correl_10_200->binMean(0), weight);
+        _hist_correl_15_200->fill(_hist_correl_15_200->binMean(0), weight);
+        _hist_correl_20_200->fill(_hist_correl_20_200->binMean(0), weight);
+        _hist_correl_25_200->fill(_hist_correl_25_200->binMean(0), weight);
+        _hist_correl_30_200->fill(_hist_correl_30_200->binMean(0), weight);
+        _hist_correl_35_200->fill(_hist_correl_35_200->binMean(0), weight);
+        _hist_correl_40_200->fill(_hist_correl_40_200->binMean(0), weight);
+        _hist_correl_asym_15_200->fill(_hist_correl_asym_15_200->binMean(0), weight);
+        _hist_correl_asym_20_200->fill(_hist_correl_asym_20_200->binMean(0), weight);
+        _hist_correl_asym_25_200->fill(_hist_correl_asym_25_200->binMean(0), weight);
+        _hist_correl_asym_30_200->fill(_hist_correl_asym_30_200->binMean(0), weight);
+      }
+      
+      else if (fuzzyEquals(sqrtS, 546.0, 1E-4)) {
+        _hist_correl_10_546->fill(_hist_correl_10_546->binMean(0), weight);
+        _hist_correl_15_546->fill(_hist_correl_15_546->binMean(0), weight);
+        _hist_correl_20_546->fill(_hist_correl_20_546->binMean(0), weight);
+        _hist_correl_25_546->fill(_hist_correl_25_546->binMean(0), weight);
+        _hist_correl_30_546->fill(_hist_correl_30_546->binMean(0), weight);
+        _hist_correl_35_546->fill(_hist_correl_35_546->binMean(0), weight);
+        _hist_correl_40_546->fill(_hist_correl_40_546->binMean(0), weight);
+        _hist_correl_asym_15_546->fill(_hist_correl_asym_15_546->binMean(0), weight);
+        _hist_correl_asym_20_546->fill(_hist_correl_asym_20_546->binMean(0), weight);
+        _hist_correl_asym_25_546->fill(_hist_correl_asym_25_546->binMean(0), weight);
+        _hist_correl_asym_30_546->fill(_hist_correl_asym_30_546->binMean(0), weight);
+      }
+      
+      else if (fuzzyEquals(sqrtS, 900.0, 1E-4)) {
+        _hist_correl_10_900->fill(_hist_correl_10_900->binMean(0), weight);
+        _hist_correl_15_900->fill(_hist_correl_15_900->binMean(0), weight);
+        _hist_correl_20_900->fill(_hist_correl_20_900->binMean(0), weight);
+        _hist_correl_25_900->fill(_hist_correl_25_900->binMean(0), weight);
+        _hist_correl_30_900->fill(_hist_correl_30_900->binMean(0), weight);
+        _hist_correl_35_900->fill(_hist_correl_35_900->binMean(0), weight);
+        _hist_correl_40_900->fill(_hist_correl_40_900->binMean(0), weight);
+        _hist_correl_asym_15_900->fill(_hist_correl_asym_15_900->binMean(0), weight);
+        _hist_correl_asym_20_900->fill(_hist_correl_asym_20_900->binMean(0), weight);
+        _hist_correl_asym_25_900->fill(_hist_correl_asym_25_900->binMean(0), weight);
+        _hist_correl_asym_30_900->fill(_hist_correl_asym_30_900->binMean(0), weight);
+      }
+    }
+    
+    
+    /// Compute the correlation strengths from the per-event samples and
+    /// set each single-bin histogram's content to the corresponding value.
+    void finalize() {
+      // Get the correlation coefficients
+      //
+      // Symmetric eta intervals first
+      double correlation_cfs10 = correlation(n_10f, n_10b);
+      double correlation_cfs15 = correlation(n_15f, n_15b);
+      double correlation_cfs20 = correlation(n_20f, n_20b);
+      double correlation_cfs25 = correlation(n_25f, n_25b);
+      double correlation_cfs30 = correlation(n_30f, n_30b);
+      double correlation_cfs35 = correlation(n_35f, n_35b);
+      double correlation_cfs40 = correlation(n_40f, n_40b);
+
+      // Asymmetric eta intervals
+      //  1.5 ... 2.5 & -1.5 ... -0.5
+      double correlation_as_cfs15 = correlation(n_25f, n_15b);
+      //  2.0 ... 3.0 & -1.0 ...  0.0
+      double correlation_as_cfs20 = correlation(n_30f, n_10b);
+      //  2.5 ... 3.5 & -0.5 ...  0.5
+      double correlation_as_cfs25 = correlation(n_35f, n_05);
+      //  3.0 ... 4.0 &  0.0 ...  1.0
+      double correlation_as_cfs30 = correlation(n_40f, n_10f);
+
+      normalize(_hist_correl_10_200, correlation_cfs10);
+      normalize(_hist_correl_10_546, correlation_cfs10);
+      normalize(_hist_correl_10_900, correlation_cfs10);
+      
+      normalize(_hist_correl_15_200, correlation_cfs15);
+      normalize(_hist_correl_15_546, correlation_cfs15);
+      normalize(_hist_correl_15_900, correlation_cfs15);
+      
+      normalize(_hist_correl_20_200, correlation_cfs20);
+      normalize(_hist_correl_20_546, correlation_cfs20);
+      normalize(_hist_correl_20_900, correlation_cfs20);
+      
+      normalize(_hist_correl_25_200, correlation_cfs25);
+      normalize(_hist_correl_25_546, correlation_cfs25);
+      normalize(_hist_correl_25_900, correlation_cfs25);
+      
+      normalize(_hist_correl_30_200, correlation_cfs30);
+      normalize(_hist_correl_30_546, correlation_cfs30);
+      normalize(_hist_correl_30_900, correlation_cfs30);
+      
+      normalize(_hist_correl_35_200, correlation_cfs35);
+      normalize(_hist_correl_35_546, correlation_cfs35);
+      normalize(_hist_correl_35_900, correlation_cfs35);
+      
+      normalize(_hist_correl_40_200, correlation_cfs40);
+      normalize(_hist_correl_40_546, correlation_cfs40);
+      normalize(_hist_correl_40_900, correlation_cfs40);
+      
+      normalize(_hist_correl_asym_15_200, correlation_as_cfs15);
+      normalize(_hist_correl_asym_15_546, correlation_as_cfs15);
+      normalize(_hist_correl_asym_15_900, correlation_as_cfs15);
+      
+      normalize(_hist_correl_asym_20_200, correlation_as_cfs20);
+      normalize(_hist_correl_asym_20_546, correlation_as_cfs20);
+      normalize(_hist_correl_asym_20_900, correlation_as_cfs20);
+      
+      normalize(_hist_correl_asym_25_200, correlation_as_cfs25);
+      normalize(_hist_correl_asym_25_546, correlation_as_cfs25);
+      normalize(_hist_correl_asym_25_900, correlation_as_cfs25);
+      
+      normalize(_hist_correl_asym_30_200, correlation_as_cfs30);
+      normalize(_hist_correl_asym_30_546, correlation_as_cfs30);
+      normalize(_hist_correl_asym_30_900, correlation_as_cfs30);
+    }
+    
+    //@}
+    
+    
+  private:
+    
+    /// @name Vectors for storing the number of particles in the different eta intervals per event.
+    /// @todo Is there a better way?
+    //@{
+    
+    std::vector<int> n_10f;
+    std::vector<int> n_15f;
+    std::vector<int> n_20f;
+    std::vector<int> n_25f;
+    std::vector<int> n_30f;
+    std::vector<int> n_35f;
+    std::vector<int> n_40f;
+                           
+    std::vector<int> n_10b;
+    std::vector<int> n_15b;
+    std::vector<int> n_20b;
+    std::vector<int> n_25b;
+    std::vector<int> n_30b;
+    std::vector<int> n_35b;
+    std::vector<int> n_40b;
+   
+    std::vector<int> n_05;
+
+    //@}
+
+
+    /// @name Histograms
+    //@{
+
+    // Symmetric eta intervals
+    AIDA::IHistogram1D *_hist_correl_10_200;
+    AIDA::IHistogram1D *_hist_correl_10_546;
+    AIDA::IHistogram1D *_hist_correl_10_900;
+
+    AIDA::IHistogram1D *_hist_correl_15_200;
+    AIDA::IHistogram1D *_hist_correl_15_546;
+    AIDA::IHistogram1D *_hist_correl_15_900;
+
+    AIDA::IHistogram1D *_hist_correl_20_200;
+    AIDA::IHistogram1D *_hist_correl_20_546;
+    AIDA::IHistogram1D *_hist_correl_20_900;
+    
+    AIDA::IHistogram1D *_hist_correl_25_200;
+    AIDA::IHistogram1D *_hist_correl_25_546;
+    AIDA::IHistogram1D *_hist_correl_25_900;
+    
+    AIDA::IHistogram1D *_hist_correl_30_200;
+    AIDA::IHistogram1D *_hist_correl_30_546;
+    AIDA::IHistogram1D *_hist_correl_30_900;
+
+    AIDA::IHistogram1D *_hist_correl_35_200;
+    AIDA::IHistogram1D *_hist_correl_35_546;
+    AIDA::IHistogram1D *_hist_correl_35_900;
+    
+    AIDA::IHistogram1D *_hist_correl_40_200;
+    AIDA::IHistogram1D *_hist_correl_40_546;
+    AIDA::IHistogram1D *_hist_correl_40_900;
+    
+    // For asymmetric eta intervals
+    AIDA::IHistogram1D *_hist_correl_asym_15_200;
+    AIDA::IHistogram1D *_hist_correl_asym_15_546;
+    AIDA::IHistogram1D *_hist_correl_asym_15_900;
+                                      
+    AIDA::IHistogram1D *_hist_correl_asym_20_200;
+    AIDA::IHistogram1D *_hist_correl_asym_20_546;
+    AIDA::IHistogram1D *_hist_correl_asym_20_900;
+                                      
+    AIDA::IHistogram1D *_hist_correl_asym_25_200;
+    AIDA::IHistogram1D *_hist_correl_asym_25_546;
+    AIDA::IHistogram1D *_hist_correl_asym_25_900;
+                                      
+    AIDA::IHistogram1D *_hist_correl_asym_30_200;
+    AIDA::IHistogram1D *_hist_correl_asym_30_546;
+    AIDA::IHistogram1D *_hist_correl_asym_30_900;
+    //@}
+
+  };
+
+
+  
+  // This global object acts as a hook for the plugin system: its
+  // construction at library load time registers the analysis with Rivet.
+  AnalysisBuilder<UA5_1988_S1867512> plugin_UA5_1988_S1867512;
+
+}

Copied: trunk/src/Analyses/UA5_1989_S1926373.cc (from r1802, trunk/src/Analyses/SPS/UA5_1989_S1926373.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/UA5_1989_S1926373.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/SPS/UA5_1989_S1926373.cc)
@@ -0,0 +1,151 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/Beam.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"
+
+namespace Rivet {
+
+
+  /// @brief UA5 charged-particle multiplicity distributions at
+  /// sqrt(s) = 200 and 900 GeV, in nested |eta| windows.
+  class UA5_1989_S1926373 : public Analysis {
+  public:
+    
+    /// Constructor: declare beams and book one ChargedFinalState per
+    /// |eta| window (0.5, 1.5, 3.0, 5.0) plus an unrestricted one used
+    /// only for the minimum-bias trigger emulation.
+    UA5_1989_S1926373()
+      : Analysis("UA5_1989_S1926373")
+    { 
+      setBeams(PROTON, ANTIPROTON);
+      addProjection(Beam(), "Beams");
+      addProjection(ChargedFinalState(), "CFSAll");
+      addProjection(ChargedFinalState(-0.5, 0.5), "CFS05");
+      addProjection(ChargedFinalState(-1.5, 1.5), "CFS15");
+      addProjection(ChargedFinalState(-3.0, 3.0), "CFS30");
+      addProjection(ChargedFinalState(-5.0, 5.0), "CFS50");
+      _numVetoed = 0;
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    /// Book histograms; the (d, x, y) booking codes refer to this
+    /// paper's HepData record.
+    void init() {
+      // NB. _hist_nch{200,900} and _hist_nch{200,900}eta50 use the same data but different binning
+      _hist_nch200       = bookHistogram1D(1,1,1); 
+      _hist_nch900       = bookHistogram1D(2,1,1);
+      _hist_nch200eta05  = bookHistogram1D(3,1,1);
+      _hist_nch200eta15  = bookHistogram1D(4,1,1);
+      _hist_nch200eta30  = bookHistogram1D(5,1,1);
+      _hist_nch200eta50  = bookHistogram1D(6,1,1);
+      _hist_nch900eta05  = bookHistogram1D(7,1,1);
+      _hist_nch900eta15  = bookHistogram1D(8,1,1);
+      _hist_nch900eta30  = bookHistogram1D(9,1,1);
+      _hist_nch900eta50  = bookHistogram1D(10,1,1);
+      _hist_mean_nch_200 = bookHistogram1D(11,1,1); 
+      _hist_mean_nch_900 = bookHistogram1D(12,1,1);
+    } 
+    
+    
+    /// Do the analysis: emulate the hodoscope trigger, then fill the
+    /// multiplicity distributions for the matching beam energy.
+    void analyze(const Event& event) {
+      const double sqrtS = applyProjection<Beam>(event, "Beams").sqrtS();
+      const double weight = event.weight();
+      
+      // Minimum Bias trigger requirements from the hodoscopes
+      int n_trig_1(0), n_trig_2(0);
+      /// @todo Use CFS in +,- eta ranges as below, to cache this loop between UA5 analyses
+      const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFSAll");
+      foreach (const Particle& p, cfs.particles()) {
+        const double eta = p.momentum().pseudorapidity();
+        if (inRange(eta, -5.6, -2.0)) n_trig_1 += 1;
+        else if (inRange(eta, 2.0, 5.6)) n_trig_2 += 1;
+      }
+      
+      // Require at least one coincidence hit in trigger hodoscopes
+      getLog() << Log::DEBUG << "Trigger -: " << n_trig_1 << ", Trigger +: " << n_trig_2 << endl;
+      if (n_trig_1 == 0 || n_trig_2 == 0) {
+        _numVetoed +=1;
+        vetoEvent;
+      }
+      
+      // Count final state particles in several eta regions
+      const int numP05 = applyProjection<ChargedFinalState>(event, "CFS05").size();
+      const int numP15 = applyProjection<ChargedFinalState>(event, "CFS15").size();
+      const int numP30 = applyProjection<ChargedFinalState>(event, "CFS30").size();
+      const int numP50 = applyProjection<ChargedFinalState>(event, "CFS50").size();
+      
+      // Fill histograms
+      if (fuzzyEquals(sqrtS, 200.0, 1E-4)) {
+        _hist_nch200->fill(numP50, weight);
+        _hist_nch200eta05->fill(numP05, weight);
+        _hist_nch200eta15->fill(numP15, weight);
+        _hist_nch200eta30->fill(numP30, weight);
+        _hist_nch200eta50->fill(numP50, weight);
+        // NOTE(review): fills with the multiplicity as the weight and
+        // ignores the event weight; together with the 1/sumOfWeights()
+        // scaling in finalize() this gives <nch> only for unit-weight
+        // events — confirm whether numP50*weight is intended.
+        _hist_mean_nch_200->fill(_hist_mean_nch_200->binMean(0), numP50);
+      }
+      else if (fuzzyEquals(sqrtS, 900.0, 1E-4)) {
+        _hist_nch900->fill(numP50, weight);
+        _hist_nch900eta05->fill(numP05, weight);
+        _hist_nch900eta15->fill(numP15, weight);
+        _hist_nch900eta30->fill(numP30, weight);
+        _hist_nch900eta50->fill(numP50, weight);
+        // NOTE(review): same event-weight caveat as the 200 GeV case above.
+        _hist_mean_nch_900->fill(_hist_mean_nch_900->binMean(0), numP50);
+      }
+    }
+    
+    
+    
+    /// Normalise the distributions to the areas of the reference
+    /// histograms and report trigger statistics.
+    void finalize() {
+      // Normalise to area of refhistos
+      /// @todo Use generator cross-sections
+      normalize(_hist_nch200, 2.011);
+      normalize(_hist_nch900, 2.0434);
+      normalize(_hist_nch200eta05, 1.01255);
+      normalize(_hist_nch200eta15, 1.0191);
+      normalize(_hist_nch200eta30, 1.02615);
+      normalize(_hist_nch200eta50, 1.03475);
+      normalize(_hist_nch900eta05, 1.0035);
+      normalize(_hist_nch900eta15, 1.01405);
+      normalize(_hist_nch900eta30, 1.03055);
+      normalize(_hist_nch900eta50, 1.02791);
+      // Scale to total number of weights
+      scale(_hist_mean_nch_200, 1.0/sumOfWeights());
+      scale(_hist_mean_nch_900, 1.0/sumOfWeights());
+      
+      // Print trigger statistics
+      // NOTE(review): _numVetoed is an unweighted event count while
+      // sumOfWeights() is weighted, so these rates are only exact for
+      // unit-weight events — confirm if weighted vetoes are wanted.
+      getLog() << Log::INFO << "No. events vetoed: " << _numVetoed << endl;
+      getLog() << Log::INFO << "No. events accepted: " << sumOfWeights() - _numVetoed << endl;
+      getLog() << Log::INFO << "Relative trigger rate: " << 100.0*(sumOfWeights() - _numVetoed)/sumOfWeights() << "%" << endl;
+    }
+
+    //@}
+
+
+  private:
+    
+    //@{
+    /// Histograms
+    AIDA::IHistogram1D* _hist_nch200;
+    AIDA::IHistogram1D* _hist_nch900;
+    AIDA::IHistogram1D* _hist_nch200eta05;
+    AIDA::IHistogram1D* _hist_nch200eta15;
+    AIDA::IHistogram1D* _hist_nch200eta30;
+    AIDA::IHistogram1D* _hist_nch200eta50;
+    AIDA::IHistogram1D* _hist_nch900eta05;
+    AIDA::IHistogram1D* _hist_nch900eta15;
+    AIDA::IHistogram1D* _hist_nch900eta30;
+    AIDA::IHistogram1D* _hist_nch900eta50;
+    AIDA::IHistogram1D* _hist_mean_nch_200;
+    AIDA::IHistogram1D* _hist_mean_nch_900;
+    //@}
+
+    // Unweighted count of events failing the trigger coincidence
+    unsigned int _numVetoed;
+  };
+
+  
+
+  // This global object acts as a hook for the plugin system: its
+  // construction at library load time registers the analysis with Rivet.
+  AnalysisBuilder<UA5_1989_S1926373> plugin_UA5_1989_S1926373;
+
+}

Copied: trunk/src/Analyses/ZEUS_2001_S4815815.cc (from r1802, trunk/src/Analyses/HERA/ZEUS_2001_S4815815.cc)
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ trunk/src/Analyses/ZEUS_2001_S4815815.cc	Wed Sep  2 11:16:20 2009	(r1803, copy of r1802, trunk/src/Analyses/HERA/ZEUS_2001_S4815815.cc)
@@ -0,0 +1,81 @@
+// -*- C++ -*-
+#include "Rivet/Analysis.hh"
+#include "Rivet/RivetAIDA.hh"
+#include "Rivet/Tools/Logging.hh"
+#include "Rivet/Projections/FastJets.hh"
+
+namespace Rivet {
+
+
+  /// @brief ZEUS dijet photoproduction study used in the ZEUS Jets PDF fit
+  ///
+  /// This class is a reproduction of the HZTool routine for the ZEUS 
+  /// dijet photoproduction paper which was used in the ZEUS Jets PDF fit.  
+  ///
+  /// @author Jon Butterworth
+  class ZEUS_2001_S4815815 : public Analysis {
+
+  public:
+
+    /// Default constructor: declare e+ p beams and a kT jet projection.
+    ZEUS_2001_S4815815()
+      : Analysis("ZEUS_2001_S4815815") 
+    { 
+      setBeams(POSITRON, PROTON);
+      FinalState fs;
+      addProjection(fs, "FS");
+      /// @todo This is the *wrong* jet def: correct it!
+      getLog() << Log::WARN << "This analysis uses the wrong jet definition: the " 
+               << "paper just says 'a cone algorithm was applied to the CAL cells and jets "
+               << "were reconstructed using the energies and positions of these cells'" << endl;
+      addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets");
+    }
+
+
+    /// @name Analysis methods
+    //@{
+
+    // Book histograms
+    void init() {
+      /// @todo This doesn't seem to correspond to the plots in the paper (SPIRES 4730372)
+      _histJetEt1 = bookHistogram1D("JetET1", 11, 14.0, 75.0);
+    }
+
+
+    // Do the analysis: fill the jet ET histogram with every jet's pT.
+    void analyze(const Event& event) {
+      const FastJets& jets = applyProjection<FastJets>(event, "Jets");
+      const size_t nj = jets.size();
+      // Per-event diagnostics belong at DEBUG level (cf. the other
+      // analyses in this commit), otherwise every event spams the log.
+      getLog() << Log::DEBUG << "Jet multiplicity = " << nj << endl;
+      
+      // Fill histograms
+      PseudoJets jetList = jets.pseudoJets();
+      for (PseudoJets::const_iterator j = jetList.begin(); j != jetList.end(); ++j) {
+        _histJetEt1->fill(j->perp(), event.weight() );
+      }
+    }
+    
+    
+    // Finalize: nothing to do — the histogram is left unnormalised.
+    void finalize() { 
+      //
+    }
+
+    //@}
+
+  private:
+
+    /// @name Histograms
+    //@{
+    AIDA::IHistogram1D* _histJetEt1;
+    //@}
+
+
+  };
+
+    
+    
+  // This global object acts as a hook for the plugin system: its
+  // construction at library load time registers the analysis with Rivet.
+  AnalysisBuilder<ZEUS_2001_S4815815> plugin_ZEUS_2001_S4815815;
+
+}


More information about the Rivet-svn mailing list