[Rivet-svn] r2080 - in trunk: include/LWH include/Rivet include/Rivet/Analyses include/Rivet/Math include/Rivet/Math/eigen include/Rivet/Projections include/Rivet/Tools include/TinyXML src src/Analyses src/Projections src/Test src/Tools src/Tools/TinyXML

blackhole at projects.hepforge.org blackhole at projects.hepforge.org
Thu Nov 19 15:02:56 GMT 2009


Author: buckley
Date: Thu Nov 19 15:02:51 2009
New Revision: 2080

Log:
Pedantic cleanup of trailing spaces, since www.codinghorror.com/blog/archives/001310.html makes me feel itchy

Modified:
   trunk/include/LWH/AIBaseHistogram.h
   trunk/include/LWH/AIDataPoint.h
   trunk/include/LWH/AIDataPointSet.h
   trunk/include/LWH/AIHistogram1D.h
   trunk/include/LWH/AIMeasurement.h
   trunk/include/LWH/AIProfile1D.h
   trunk/include/LWH/AnalysisFactory.h
   trunk/include/LWH/Axis.h
   trunk/include/LWH/DataPointSet.h
   trunk/include/LWH/DataPointSetFactory.h
   trunk/include/LWH/Histogram1D.h
   trunk/include/LWH/Measurement.h
   trunk/include/LWH/Profile1D.h
   trunk/include/LWH/Tree.h
   trunk/include/LWH/VariAxis.h
   trunk/include/Rivet/Analyses/MC_JetAnalysis.hh
   trunk/include/Rivet/Analysis.hh
   trunk/include/Rivet/AnalysisHandler.hh
   trunk/include/Rivet/AnalysisInfo.hh
   trunk/include/Rivet/AnalysisLoader.hh
   trunk/include/Rivet/BeamConstraint.hh
   trunk/include/Rivet/Cmp.hh
   trunk/include/Rivet/Constraints.hh
   trunk/include/Rivet/Event.hh
   trunk/include/Rivet/Exceptions.hh
   trunk/include/Rivet/HistoHandler.hh
   trunk/include/Rivet/Jet.hh
   trunk/include/Rivet/Makefile.am
   trunk/include/Rivet/Math/Constants.hh
   trunk/include/Rivet/Math/MathHeader.hh
   trunk/include/Rivet/Math/MathUtils.hh
   trunk/include/Rivet/Math/MatrixDiag.hh
   trunk/include/Rivet/Math/MatrixN.hh
   trunk/include/Rivet/Math/Units.hh
   trunk/include/Rivet/Math/Vector3.hh
   trunk/include/Rivet/Math/Vector4.hh
   trunk/include/Rivet/Math/VectorN.hh
   trunk/include/Rivet/Math/eigen/matrixbase.h
   trunk/include/Rivet/Math/eigen/projective.h
   trunk/include/Rivet/Math/eigen/vectorbase.h
   trunk/include/Rivet/Projection.hh
   trunk/include/Rivet/ProjectionApplier.hh
   trunk/include/Rivet/ProjectionHandler.hh
   trunk/include/Rivet/Projections/AxesDefinition.hh
   trunk/include/Rivet/Projections/Beam.hh
   trunk/include/Rivet/Projections/ChargedFinalState.hh
   trunk/include/Rivet/Projections/ChargedLeptons.hh
   trunk/include/Rivet/Projections/ClusteredPhotons.hh
   trunk/include/Rivet/Projections/DISKinematics.hh
   trunk/include/Rivet/Projections/DISLepton.hh
   trunk/include/Rivet/Projections/FinalState.hh
   trunk/include/Rivet/Projections/FinalStateHCM.hh
   trunk/include/Rivet/Projections/FoxWolframMoments.hh
   trunk/include/Rivet/Projections/HadronicFinalState.hh
   trunk/include/Rivet/Projections/Hemispheres.hh
   trunk/include/Rivet/Projections/IdentifiedFinalState.hh
   trunk/include/Rivet/Projections/InitialQuarks.hh
   trunk/include/Rivet/Projections/InvMassFinalState.hh
   trunk/include/Rivet/Projections/IsolationEstimators.hh
   trunk/include/Rivet/Projections/IsolationProjection.hh
   trunk/include/Rivet/Projections/IsolationTools.hh
   trunk/include/Rivet/Projections/JetAlg.hh
   trunk/include/Rivet/Projections/JetShape.hh
   trunk/include/Rivet/Projections/KtJets.hh
   trunk/include/Rivet/Projections/LeadingParticlesFinalState.hh
   trunk/include/Rivet/Projections/LossyFinalState.hh
   trunk/include/Rivet/Projections/Multiplicity.hh
   trunk/include/Rivet/Projections/PVertex.hh
   trunk/include/Rivet/Projections/ParisiTensor.hh
   trunk/include/Rivet/Projections/SVertex.hh
   trunk/include/Rivet/Projections/Sphericity.hh
   trunk/include/Rivet/Projections/Thrust.hh
   trunk/include/Rivet/Projections/TotalVisibleMomentum.hh
   trunk/include/Rivet/Projections/TriggerCDFRun0Run1.hh
   trunk/include/Rivet/Projections/TriggerUA5.hh
   trunk/include/Rivet/Projections/UnstableFinalState.hh
   trunk/include/Rivet/Projections/VetoedFinalState.hh
   trunk/include/Rivet/Projections/VisibleFinalState.hh
   trunk/include/Rivet/Projections/WFinder.hh
   trunk/include/Rivet/Projections/ZFinder.hh
   trunk/include/Rivet/RivetAIDA.hh
   trunk/include/Rivet/Run.hh
   trunk/include/Rivet/Tools/BinnedHistogram.hh
   trunk/include/Rivet/Tools/Configuration.hh
   trunk/include/Rivet/Tools/Logging.hh
   trunk/include/Rivet/Tools/ParticleIdUtils.hh
   trunk/include/Rivet/Tools/TypeTraits.hh
   trunk/include/Rivet/Tools/Utils.hh
   trunk/include/Rivet/Tools/osdir.hh
   trunk/include/TinyXML/tinyxml.h
   trunk/src/Analyses/ALEPH_1991_S2435284.cc
   trunk/src/Analyses/ALEPH_1996_S3196992.cc
   trunk/src/Analyses/ALEPH_1996_S3486095.cc
   trunk/src/Analyses/CDF_1988_S1865951.cc
   trunk/src/Analyses/CDF_1990_S2089246.cc
   trunk/src/Analyses/CDF_1994_S2952106.cc
   trunk/src/Analyses/CDF_1996_S3108457.cc
   trunk/src/Analyses/CDF_1996_S3349578.cc
   trunk/src/Analyses/CDF_1996_S3418421.cc
   trunk/src/Analyses/CDF_1998_S3618439.cc
   trunk/src/Analyses/CDF_2000_S4155203.cc
   trunk/src/Analyses/CDF_2000_S4266730.cc
   trunk/src/Analyses/CDF_2001_S4517016.cc
   trunk/src/Analyses/CDF_2001_S4563131.cc
   trunk/src/Analyses/CDF_2001_S4751469.cc
   trunk/src/Analyses/CDF_2002_S4796047.cc
   trunk/src/Analyses/CDF_2004_S5839831.cc
   trunk/src/Analyses/CDF_2005_S6080774.cc
   trunk/src/Analyses/CDF_2005_S6217184.cc
   trunk/src/Analyses/CDF_2006_S6450792.cc
   trunk/src/Analyses/CDF_2006_S6653332.cc
   trunk/src/Analyses/CDF_2007_S7057202.cc
   trunk/src/Analyses/CDF_2008_NOTE_9351.cc
   trunk/src/Analyses/CDF_2008_S7540469.cc
   trunk/src/Analyses/CDF_2008_S7541902.cc
   trunk/src/Analyses/CDF_2008_S7782535.cc
   trunk/src/Analyses/CDF_2008_S7828950.cc
   trunk/src/Analyses/CDF_2008_S8093652.cc
   trunk/src/Analyses/CDF_2008_S8095620.cc
   trunk/src/Analyses/CDF_2009_S8057893.cc
   trunk/src/Analyses/CDF_2009_S8233977.cc
   trunk/src/Analyses/CDF_2009_S8383952.cc
   trunk/src/Analyses/CDF_2009_S8436959.cc
   trunk/src/Analyses/D0_1996_S3214044.cc
   trunk/src/Analyses/D0_1996_S3324664.cc
   trunk/src/Analyses/D0_2001_S4674421.cc
   trunk/src/Analyses/D0_2004_S5992206.cc
   trunk/src/Analyses/D0_2006_S6438750.cc
   trunk/src/Analyses/D0_2007_S7075677.cc
   trunk/src/Analyses/D0_2008_S6879055.cc
   trunk/src/Analyses/D0_2008_S7554427.cc
   trunk/src/Analyses/D0_2008_S7662670.cc
   trunk/src/Analyses/D0_2008_S7719523.cc
   trunk/src/Analyses/D0_2008_S7837160.cc
   trunk/src/Analyses/D0_2008_S7863608.cc
   trunk/src/Analyses/D0_2009_S8202443.cc
   trunk/src/Analyses/D0_2009_S8320160.cc
   trunk/src/Analyses/D0_2009_S8349509.cc
   trunk/src/Analyses/DELPHI_1995_S3137023.cc
   trunk/src/Analyses/DELPHI_1996_S3430090.cc
   trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc
   trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc
   trunk/src/Analyses/E735_1998_S3905616.cc
   trunk/src/Analyses/ExampleAnalysis.cc
   trunk/src/Analyses/H1_1994_S2919893.cc
   trunk/src/Analyses/H1_1995_S3167097.cc
   trunk/src/Analyses/H1_2000_S4129130.cc
   trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc
   trunk/src/Analyses/MC_JetAnalysis.cc
   trunk/src/Analyses/MC_LHC_DIJET.cc
   trunk/src/Analyses/MC_LHC_DIPHOTON.cc
   trunk/src/Analyses/MC_LHC_LEADINGJETS.cc
   trunk/src/Analyses/MC_LHC_PHOTONJETUE.cc
   trunk/src/Analyses/MC_LHC_SUSY.cc
   trunk/src/Analyses/MC_LHC_TTBAR.cc
   trunk/src/Analyses/MC_LHC_WANALYSIS.cc
   trunk/src/Analyses/MC_LHC_ZANALYSIS.cc
   trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc
   trunk/src/Analyses/MC_TVT1960_ZJETS.cc
   trunk/src/Analyses/Makefile.am
   trunk/src/Analyses/OPAL_1998_S3780481.cc
   trunk/src/Analyses/OPAL_2004_S6132243.cc
   trunk/src/Analyses/PDG_Hadron_Multiplicities.cc
   trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc
   trunk/src/Analyses/SFM_1984_S1178091.cc
   trunk/src/Analyses/STAR_2008_S7993412.cc
   trunk/src/Analyses/UA1_1990_S2044935.cc
   trunk/src/Analyses/UA5_1982_S875503.cc
   trunk/src/Analyses/UA5_1986_S1583476.cc
   trunk/src/Analyses/UA5_1988_S1867512.cc
   trunk/src/Analyses/UA5_1989_S1926373.cc
   trunk/src/Analyses/ZEUS_2001_S4815815.cc
   trunk/src/Analysis.cc
   trunk/src/AnalysisHandler.cc
   trunk/src/AnalysisInfo.cc
   trunk/src/AnalysisLoader.cc
   trunk/src/Cuts.cc
   trunk/src/Event.cc
   trunk/src/HistoHandler.cc
   trunk/src/Jet.cc
   trunk/src/Makefile.am
   trunk/src/Projection.cc
   trunk/src/ProjectionApplier.cc
   trunk/src/ProjectionHandler.cc
   trunk/src/Projections/Beam.cc
   trunk/src/Projections/ChargedFinalState.cc
   trunk/src/Projections/DISKinematics.cc
   trunk/src/Projections/DISLepton.cc
   trunk/src/Projections/FinalState.cc
   trunk/src/Projections/FoxWolframMoments.cc
   trunk/src/Projections/HadronicFinalState.cc
   trunk/src/Projections/IdentifiedFinalState.cc
   trunk/src/Projections/InitialQuarks.cc
   trunk/src/Projections/InvMassFinalState.cc
   trunk/src/Projections/JetAlg.cc
   trunk/src/Projections/JetShape.cc
   trunk/src/Projections/KtJets.cc
   trunk/src/Projections/LeadingParticlesFinalState.cc
   trunk/src/Projections/LossyFinalState.cc
   trunk/src/Projections/PVertex.cc
   trunk/src/Projections/SVertex.cc
   trunk/src/Projections/Sphericity.cc
   trunk/src/Projections/Thrust.cc
   trunk/src/Projections/TriggerCDFRun0Run1.cc
   trunk/src/Projections/TriggerUA5.cc
   trunk/src/Projections/UnstableFinalState.cc
   trunk/src/Projections/VetoedFinalState.cc
   trunk/src/Projections/VisibleFinalState.cc
   trunk/src/Projections/WFinder.cc
   trunk/src/Projections/ZFinder.cc
   trunk/src/Run.cc
   trunk/src/Test/testBoost.cc
   trunk/src/Test/testMatVec.cc
   trunk/src/Tools/BinnedHistogram.cc
   trunk/src/Tools/Logging.cc
   trunk/src/Tools/ParticleIdUtils.cc
   trunk/src/Tools/ParticleName.cc
   trunk/src/Tools/RivetAIDA.cc
   trunk/src/Tools/TinyXML/tinyxml.cpp
   trunk/src/Tools/TinyXML/tinyxmlerror.cpp
   trunk/src/Tools/TinyXML/tinyxmlparser.cpp
   trunk/src/Tools/Utils.cc

Modified: trunk/include/LWH/AIBaseHistogram.h
==============================================================================
--- trunk/include/LWH/AIBaseHistogram.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AIBaseHistogram.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,6 +1,6 @@
 // -*- C++ -*-
 #ifndef LWH_AIBaseHistogram_H
-#define LWH_AIBaseHistogram_H 
+#define LWH_AIBaseHistogram_H
 //
 
 #ifndef LWH_USING_AIDA

Modified: trunk/include/LWH/AIDataPoint.h
==============================================================================
--- trunk/include/LWH/AIDataPoint.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AIDataPoint.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,14 +13,14 @@
 /**
  * Basic user-level interface class for holding and managing
  * a single set of "measurements".
- * 
+ *
  * @author The AIDA team (http://aida.freehep.org/)
  *
  */
 
 class IDataPoint {
 
-public: 
+public:
     virtual ~IDataPoint() {}
     virtual int dimension() const = 0;
     virtual IMeasurement * coordinate(int coord) = 0;

Modified: trunk/include/LWH/AIDataPointSet.h
==============================================================================
--- trunk/include/LWH/AIDataPointSet.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AIDataPointSet.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -21,7 +21,7 @@
 
   class IDataPointSet {
 
-  public: 
+  public:
     virtual ~IDataPointSet() { /* nop */; }
     virtual IAnnotation & annotation() = 0;
     virtual const IAnnotation & annotation() const = 0;

Modified: trunk/include/LWH/AIHistogram1D.h
==============================================================================
--- trunk/include/LWH/AIHistogram1D.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AIHistogram1D.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,7 +15,7 @@
 
 class IHistogram : virtual public IBaseHistogram {
 
-public: 
+public:
 
   virtual ~IHistogram() {}
 

Modified: trunk/include/LWH/AIMeasurement.h
==============================================================================
--- trunk/include/LWH/AIMeasurement.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AIMeasurement.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,7 +10,7 @@
 
 class IMeasurement {
 
-public: 
+public:
     virtual ~IMeasurement() {}
     virtual double value() const = 0;
     virtual double errorPlus() const = 0;

Modified: trunk/include/LWH/AIProfile1D.h
==============================================================================
--- trunk/include/LWH/AIProfile1D.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AIProfile1D.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,6 +1,6 @@
 // -*- C++ -*-
 #ifndef LWH_AIProfile1D_H
-#define LWH_AIProfile1D_H 
+#define LWH_AIProfile1D_H
 //
 
 #ifndef LWH_USING_AIDA
@@ -31,7 +31,7 @@
 
 class IProfile1D : virtual public IProfile {
 
-public: 
+public:
 
     virtual ~IProfile1D() {}
 

Modified: trunk/include/LWH/AnalysisFactory.h
==============================================================================
--- trunk/include/LWH/AnalysisFactory.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/AnalysisFactory.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -36,7 +36,7 @@
  */
 class AnalysisFactory: public IAnalysisFactory {
 
-public: 
+public:
   /// Destructor.
   virtual ~AnalysisFactory() {
     clear();

Modified: trunk/include/LWH/Axis.h
==============================================================================
--- trunk/include/LWH/Axis.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/Axis.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -61,7 +61,7 @@
    */
   double upperEdge() const { return upper; }
 
-  /** 
+  /**
    * The number of bins (excluding underflow and overflow) on the IAxis.
    * @return The IAxis's number of bins.
    *
@@ -88,7 +88,7 @@
    * @return The upper edge of the corresponding bin; for the overflow
    * bin this is <tt>Double.POSITIVE_INFINITY</tt>.
    *
-   */ 
+   */
   double binUpperEdge(int index) const {
     return index >= nbins? std::numeric_limits<double>::max():
       lower + double(std::max(index, -1) + 1)*binWidth(0);
@@ -100,7 +100,7 @@
    * or OVERFLOW or UNDERFLOW.
    * @return      The width of the corresponding bin.
    *
-   */ 
+   */
   double binWidth(int) const {
     return (upper - lower)/double(nbins);
   }

Modified: trunk/include/LWH/DataPointSet.h
==============================================================================
--- trunk/include/LWH/DataPointSet.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/DataPointSet.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -317,7 +317,7 @@
   /**
    * Not implemented in LWH.
    * @return null pointer always.
-   */ 
+   */
   void * cast(const std::string &) const {
     return 0;
   }
@@ -411,9 +411,9 @@
       eyh.push_back(point(i)->coordinate(1)->errorPlus());
     }
         
-    TGraphAsymmErrors* graph = new TGraphAsymmErrors(N, &(x[0]), &(y[0]), 
-                                                     &(exl[0]), &(exh[0]), 
-                                                     &(eyl[0]), &(eyh[0]) ); 
+    TGraphAsymmErrors* graph = new TGraphAsymmErrors(N, &(x[0]), &(y[0]),
+                                                     &(exl[0]), &(exh[0]),
+                                                     &(eyl[0]), &(eyh[0]) );
         
     graph->SetTitle(title().c_str());
     graph->SetName(name.c_str());

Modified: trunk/include/LWH/DataPointSetFactory.h
==============================================================================
--- trunk/include/LWH/DataPointSetFactory.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/DataPointSetFactory.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -24,7 +24,7 @@
  */
 class DataPointSetFactory: public IDataPointSetFactory {
 
-public: 
+public:
 
   /**
    * Standard constructor.
@@ -299,7 +299,7 @@
          !dset->setCoordinate(1, y, eyp, eym) )
       throw std::runtime_error("LWH could add points to DataPointSet '" +
 			       title +  "'." );
-    return dset;   
+    return dset;
   }
 
   /**
@@ -405,7 +405,7 @@
          !dset->setCoordinate(2, z, ezp, ezm) )
       throw std::runtime_error("LWH could add points to DataPointSet '" +
 			       title +  "'." );
-    return dset;   
+    return dset;
   }
 
   /**

Modified: trunk/include/LWH/Histogram1D.h
==============================================================================
--- trunk/include/LWH/Histogram1D.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/Histogram1D.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -117,7 +117,7 @@
     /**
      * Get the Histogram's dimension.
      * @return The Histogram's dimension.
-     */ 
+     */
     int dimension() const {
       return 1;
     }
@@ -139,7 +139,7 @@
      * Get the number of in-range entries in the Histogram.
      * @return The number of in-range entries.
      *
-     */ 
+     */
     int entries() const {
       int si = 0;
       for ( int i = 2; i < ax->bins() + 2; ++i ) si += sum[i];
@@ -249,7 +249,7 @@
     }
 
     /**
-     * The weighted mean of a bin. 
+     * The weighted mean of a bin.
      * @param index The bin number (0...N-1) or OVERFLOW or UNDERFLOW.
      * @return      The mean of the corresponding bin.
      */
@@ -260,7 +260,7 @@
     };
 
     /**
-     * The weighted RMS of a bin. 
+     * The weighted RMS of a bin.
      * @param index The bin number (0...N-1) or OVERFLOW or UNDERFLOW.
      * @return      The RMS of the corresponding bin.
      */
@@ -274,7 +274,7 @@
      * Number of entries in the corresponding bin (ie the number of
      * times fill was called for this bin).
      * @param index The bin number (0...N-1) or OVERFLOW or UNDERFLOW.
-     * @return      The number of entries in the corresponding bin. 
+     * @return      The number of entries in the corresponding bin.
      */
     int binEntries(int index) const {
       return sum[index + 2];
@@ -427,7 +427,7 @@
     /**
      * Not implemented in LWH.
      * @return null pointer always.
-     */ 
+     */
     void * cast(const std::string &) const {
       return 0;
     }
@@ -511,15 +511,15 @@
         double* bins = new double[nbins+1];
         for (int i=0; i<nbins; ++i) {
       bins[i] = vax->binEdges(i).first;
-        } 
-        bins[nbins] = vax->binEdges(nbins-1).second; //take last bin right border 
+        }
+        bins[nbins] = vax->binEdges(nbins-1).second; //take last bin right border
         hist1d = new TH1D(name.c_str(), title().c_str(), nbins, bins);
         delete bins;
       }
 
 
       double entries = 0;
-      for ( int i = 0; i < nbins + 2; ++i ) { 
+      for ( int i = 0; i < nbins + 2; ++i ) {
         if ( sum[i] ) {
           //i==0: underflow->RootBin(0), i==1: overflow->RootBin(NBins+1)
           entries = entries + sum[i];

Modified: trunk/include/LWH/Measurement.h
==============================================================================
--- trunk/include/LWH/Measurement.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/Measurement.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -22,7 +22,7 @@
  */
 class Measurement: public IMeasurement {
 
-public: 
+public:
 
   /**
    * Standard constructor.

Modified: trunk/include/LWH/Profile1D.h
==============================================================================
--- trunk/include/LWH/Profile1D.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/Profile1D.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,6 +1,6 @@
 // -*- C++ -*-
 #ifndef LWH_Profile1D_H
-#define LWH_Profile1D_H 
+#define LWH_Profile1D_H
 //
 // This is the declaration of the Profile1D class.
 //
@@ -63,7 +63,7 @@
   Profile1D(const Profile1D & h)
     : IBaseHistogram(h), IProfile(h), IProfile1D(h), ManagedObject(h),
       fax(0), vax(0), sum(h.sum), sumw(h.sumw), sumw2(h.sumw2),
-      sumxw(h.sumxw), sumx2w(h.sumx2w), sumyw(h.sumyw), sumy2w(h.sumy2w), 
+      sumxw(h.sumxw), sumx2w(h.sumx2w), sumyw(h.sumyw), sumy2w(h.sumy2w),
       sumy2w2(h.sumy2w2) {
     const VariAxis * hvax = dynamic_cast<const VariAxis *>(h.ax);
     if ( hvax ) ax = vax = new VariAxis(*hvax);
@@ -148,7 +148,7 @@
    * Get the number of in-range entries in the Histogram.
    * @return The number of in-range entries.
    *
-   */ 
+   */
   int entries() const {
     int si = 0;
     for ( int i = 2; i < ax->bins() + 2; ++i ) si += sum[i];
@@ -300,7 +300,7 @@
   }
 
   /**
-   * The weighted mean of a bin. 
+   * The weighted mean of a bin.
    * @param index The bin number (0...N-1) or OVERFLOW or UNDERFLOW.
    * @return      The mean in x of the corresponding bin.
    */
@@ -311,7 +311,7 @@
   };
 
   /**
-   * The weighted RMS of a bin. 
+   * The weighted RMS of a bin.
    * @param index The bin number (0...N-1) or OVERFLOW or UNDERFLOW.
    * @return      The RMS in x of the corresponding bin.
    */
@@ -325,7 +325,7 @@
    * Number of entries in the corresponding bin (ie the number of
    * times fill was called for this bin).
    * @param index The bin number (0...N-1) or OVERFLOW or UNDERFLOW.
-   * @return      The number of entries in the corresponding bin. 
+   * @return      The number of entries in the corresponding bin.
    */
   int binEntries(int index) const {
     return sum[index + 2];
@@ -339,7 +339,7 @@
    */
   double binHeight(int index) const {
     double bH = 0.;
-    if (sumw[index+2] > 0. && sumyw[index+2] > 0.) 
+    if (sumw[index+2] > 0. && sumyw[index+2] > 0.)
       bH = sumyw[index+2]/sumw[index+2];
     return bH;
   }
@@ -353,7 +353,7 @@
   double binError(int index) const {
     if (sumw[index+2] > 0.0) {
       double binErr2 = sumy2w[index+2]*sumw[index+2] - sumyw[index+2]*sumyw[index+2];
-      binErr2 /= sumw[index+2]*sumw[index+2] - sumw2[index+2]; 
+      binErr2 /= sumw[index+2]*sumw[index+2] - sumw2[index+2];
       binErr2 /= sumw[index+2]; //< s_hat ~ s/sqrt(N)
       if (binErr2 >= 0.0) return sqrt(binErr2);
     }
@@ -461,7 +461,7 @@
   /**
    * Not implemented in LWH.
    * @return null pointer always.
-   */ 
+   */
   void * cast(const std::string &) const {
     return 0;
   }
@@ -490,7 +490,7 @@
        << "\">\n      <statistic mean=\"" << mean()
        << "\" direction=\"x\"\n        rms=\"" << rms()
        << "\"/>\n    </statistics>\n    <data1d>\n";
-    for ( int i = 0; i < ax->bins() + 2; ++i ) 
+    for ( int i = 0; i < ax->bins() + 2; ++i )
       if ( sum[i] && binError(i)>0.) {
         os << "      <bin1d binNum=\"";
         if ( i == 0 ) os << "UNDERFLOW";
@@ -547,15 +547,15 @@
       double* bins = new double[nbins+1];
       for (int i=0; i<nbins; ++i) {
 	bins[i] = vax->binEdges(i).first;
-      } 
-      bins[nbins] = vax->binEdges(nbins-1).second; //take last bin right border 
+      }
+      bins[nbins] = vax->binEdges(nbins-1).second; //take last bin right border
       prof1d = new TProfile(name.c_str(), title().c_str(), nbins, bins);
       delete bins;
     }
 
 
     double entries = 0;
-    for ( int i = 0; i < nbins + 2; ++i ) { 
+    for ( int i = 0; i < nbins + 2; ++i ) {
       if ( sum[i] && binError(i)>0.) {
 	//i==0: underflow->RootBin(0), i==1: overflow->RootBin(NBins+1)
 	entries = entries + sum[i];

Modified: trunk/include/LWH/Tree.h
==============================================================================
--- trunk/include/LWH/Tree.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/Tree.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -21,7 +21,7 @@
   
   
   enum fileformat {
-    flat, 
+    flat,
     xml
     #ifdef HAVE_ROOT
     , root
@@ -132,7 +132,7 @@
       //PathSet::iterator theIterator;
       //for( theIterator = dirs.begin(); theIterator != dirs.end(); theIterator++ ) {
       //std::cout << "1:" << pth2str(*theIterator);
-      //}   
+      //}
       //std::cout << std::endl;
       
       if ( dirs.find(path) == dirs.end() ) {
@@ -161,7 +161,7 @@
       return cwd;
     }
     
-    /** 
+    /**
      * List, into a given output stream, all the IManagedObjects, including
      * directories (but not "." and ".."), in a given path. Directories end
      * with "/". The list can be recursive.
@@ -236,7 +236,7 @@
      * @param dir The absolute or relative path of the new directory.
      * @return false If a subdirectory within the path does
      * not exist or it is not a directory. Also if the directory already exists.
-     */   
+     */
     bool mkdir(const std::string & dir) {
       Path p = purgepath(str2pth(fullpath(sts(dir))));
       Path base = p;
@@ -327,7 +327,7 @@
       //std::cout << 1 << std::endl;
       if ( it == objs.end() ) return false;
       //std::cout << 2 << std::endl;
-      // Changed from != by AB: surely the directory you're copying to must 
+      // Changed from != by AB: surely the directory you're copying to must
       // exist? Why can't we just change the name in the same directory?
       if ( dirs.find(newpath) == dirs.end() ) return false;
       newpath.push_back(oldpath.back());
@@ -373,8 +373,8 @@
           break;
         case xml:
           o->writeXML(of, path, name);
-          break; 
-          #ifdef HAVE_ROOT    
+          break;
+          #ifdef HAVE_ROOT
         case root:
           o->writeROOT(file, path, name);
           break;
@@ -440,7 +440,7 @@
     /**
      * Not implemented in LWH.
      * @return null pointer always.
-     */ 
+     */
     void * cast(const std::string &) const {
       return 0;
     }

Modified: trunk/include/LWH/VariAxis.h
==============================================================================
--- trunk/include/LWH/VariAxis.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/LWH/VariAxis.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -72,7 +72,7 @@
     return (--last)->first;
   }
 
-  /** 
+  /**
    * The number of bins (excluding underflow and overflow) on the IAxis.
    * @return The IAxis's number of bins.
    *
@@ -119,7 +119,7 @@
    * @return The upper edge of the corresponding bin; for the overflow
    * bin this is <tt>Double.POSITIVE_INFINITY</tt>.
    *
-   */ 
+   */
   double binUpperEdge(int index) const {
     return binEdges(index).second;
   }
@@ -130,7 +130,7 @@
    * or OVERFLOW or UNDERFLOW.
    * @return      The width of the corresponding bin.
    *
-   */ 
+   */
   double binWidth(int index) const {
     std::pair<double,double> edges = binEdges(index);
     return edges.second - edges.first;

Modified: trunk/include/Rivet/Analyses/MC_JetAnalysis.hh
==============================================================================
--- trunk/include/Rivet/Analyses/MC_JetAnalysis.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Analyses/MC_JetAnalysis.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,19 +18,19 @@
 
 
     /// @name Analysis methods
-    //@{ 
+    //@{
     virtual void init();
     virtual void analyze(const Event& event);
     virtual void finalize();
     //@}
-  
+
   protected:
-    
+ 
     /// The energy scale and number of jets for which histograms are to be
     /// initialised
     double m_sqrts;
     size_t m_njet;
-    
+ 
     /// The name of the jet projection to be used for this analysis
     /// (this projection has to be registered by the derived analysis!)
     const std::string m_jetpro_name;

Modified: trunk/include/Rivet/Analysis.hh
==============================================================================
--- trunk/include/Rivet/Analysis.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Analysis.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -45,7 +45,7 @@
   /// histograms. Writing the histograms to a file is, however, done by
   /// the Rivet class.
   class Analysis : public ProjectionApplier {
-    
+ 
     /// The AnalysisHandler is a friend.
     friend class AnalysisHandler;
 
@@ -126,11 +126,11 @@
 
     /// @brief Information about the events needed as input for this analysis.
     ///
-    /// Event types, energies, kinematic cuts, particles to be considered 
+    /// Event types, energies, kinematic cuts, particles to be considered
     /// stable, etc. etc. Should be treated as a restructuredText bullet list
     /// (http://docutils.sourceforge.net/rst.html)
     virtual std::string runInfo() const;
-    
+ 
     /// Experiment which performed and published this analysis.
     virtual std::string experiment() const;
 
@@ -142,7 +142,7 @@
 
     /// @brief When the original experimental analysis was published.
     ///
-    /// When the refereed paper on which this is based was published, 
+    /// When the refereed paper on which this is based was published,
     /// according to SPIRES.
     virtual std::string year() const;
 
@@ -168,17 +168,17 @@
     /// Access the controlling AnalysisHandler object.
     AnalysisHandler& handler() const;
 
-    /// Normalize the given histogram, @a histo. After this call the 
-    /// histogram will have been transformed to a DataPointSet with the 
-    /// same name and path. It has the same effect as 
+    /// Normalize the given histogram, @a histo. After this call the
+    /// histogram will have been transformed to a DataPointSet with the
+    /// same name and path. It has the same effect as
     /// @c scale(histo, norm/sumOfWeights).
     /// @param histo The histogram to be normalised.
     /// @param norm The new area of the histogram.
     /// @warning The old histogram will be deleted, and its pointer set to zero.
     void normalize(AIDA::IHistogram1D*& histo, const double norm=1.0);
 
-    /// Multiplicatively scale the given histogram, @a histo. After this call the 
-    /// histogram will have been transformed to a DataPointSet with the same name and path.  
+    /// Multiplicatively scale the given histogram, @a histo. After this call the
+    /// histogram will have been transformed to a DataPointSet with the same name and path.
     /// @param histo The histogram to be scaled.
     /// @param scale The factor used to multiply the histogram bin heights.
     /// @warning The old histogram will be deleted, and its pointer set to zero.
@@ -188,13 +188,13 @@
     Analysis& setCrossSection(const double& xs);
 
     /// Return true if this analysis needs to know the process cross-section.
-    bool needsCrossSection() const;    
+    bool needsCrossSection() const;
 
   protected:
 
     /// Get the process cross-section in pb. Throws if this hasn't been set.
     const double& crossSection() const;
-    
+ 
 
     /// Get a Log object based on the name() property of the calling analysis object.
     Log& getLog() const;
@@ -206,7 +206,7 @@
     /// Get the sum of event weights seen (via the analysis handler). Use in the
     /// finalize phase only.
     double sumOfWeights() const;
-    
+ 
 
   protected:
     /// @name AIDA analysis infrastructure.
@@ -235,20 +235,20 @@
     //@{
 
     /// Book a 1D histogram with @a nbins uniformly distributed across the range @a lower - @a upper .
-    /// (NB. this returns a pointer rather than a reference since it will 
-    /// have to be stored in the analysis class - there's no point in forcing users to explicitly 
+    /// (NB. this returns a pointer rather than a reference since it will
+    /// have to be stored in the analysis class - there's no point in forcing users to explicitly
     /// get the pointer from a reference before they can use it!)
     AIDA::IHistogram1D* bookHistogram1D(const std::string& name,
                                         const size_t nbins, const double lower, const double upper,
-                                        const std::string& title="", 
+                                        const std::string& title="",
                                         const std::string& xtitle="", const std::string& ytitle="");
 
     /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges .
-    /// (NB. this returns a pointer rather than a reference since it will 
-    /// have to be stored in the analysis class - there's no point in forcing users to explicitly 
+    /// (NB. this returns a pointer rather than a reference since it will
+    /// have to be stored in the analysis class - there's no point in forcing users to explicitly
     /// get the pointer from a reference before they can use it!)
     AIDA::IHistogram1D* bookHistogram1D(const std::string& name,
-                                        const std::vector<double>& binedges, const std::string& title="", 
+                                        const std::vector<double>& binedges, const std::string& title="",
                                         const std::string& xtitle="", const std::string& ytitle="");
 
     /// Book a 1D histogram based on the name in the corresponding AIDA
@@ -260,7 +260,7 @@
     /// Book a 1D histogram based on the paper, dataset and x/y-axis IDs in the corresponding
     /// HepData record. The binnings will be obtained by reading the bundled AIDA data record file
     /// of the same filename as the analysis' name() property.
-    AIDA::IHistogram1D* bookHistogram1D(const size_t datasetId, const size_t xAxisId, 
+    AIDA::IHistogram1D* bookHistogram1D(const size_t datasetId, const size_t xAxisId,
                                         const size_t yAxisId, const std::string& title="",
                                         const std::string& xtitle="", const std::string& ytitle="");
 
@@ -271,21 +271,21 @@
     //@{
 
     /// Book a 1D profile histogram with @a nbins uniformly distributed across the range @a lower - @a upper .
-    /// (NB. this returns a pointer rather than a reference since it will 
-    /// have to be stored in the analysis class - there's no point in forcing users to explicitly 
+    /// (NB. this returns a pointer rather than a reference since it will
+    /// have to be stored in the analysis class - there's no point in forcing users to explicitly
     /// get the pointer from a reference before they can use it!)
     AIDA::IProfile1D* bookProfile1D(const std::string& name,
                                     const size_t nbins, const double lower, const double upper,
-                                    const std::string& title="", 
+                                    const std::string& title="",
                                     const std::string& xtitle="", const std::string& ytitle="");
 
     /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges .
-    /// (NB. this returns a pointer rather than a reference since it will 
-    /// have to be stored in the analysis class - there's no point in forcing users to explicitly 
+    /// (NB. this returns a pointer rather than a reference since it will
+    /// have to be stored in the analysis class - there's no point in forcing users to explicitly
     /// get the pointer from a reference before they can use it!)
     AIDA::IProfile1D* bookProfile1D(const std::string& name,
                                     const std::vector<double>& binedges,
-                                    const std::string& title="", 
+                                    const std::string& title="",
                                     const std::string& xtitle="", const std::string& ytitle="");
 
     /// Book a 1D profile histogram based on the name in the corresponding AIDA
@@ -293,11 +293,11 @@
     /// record file with the same filename as the analysis' name() property.
     AIDA::IProfile1D* bookProfile1D(const std::string& name, const std::string& title="",
                                     const std::string& xtitle="", const std::string& ytitle="");
-    
+ 
     /// Book a 1D profile histogram based on the paper, dataset and x/y-axis IDs in the corresponding
     /// HepData record. The binnings will be obtained by reading the bundled AIDA data record file
     /// of the same filename as the analysis' name() property.
-    AIDA::IProfile1D* bookProfile1D(const size_t datasetId, const size_t xAxisId, 
+    AIDA::IProfile1D* bookProfile1D(const size_t datasetId, const size_t xAxisId,
                                     const size_t yAxisId, const std::string& title="",
                                     const std::string& xtitle="", const std::string& ytitle="");
     //@}
@@ -307,16 +307,16 @@
     //@{
 
     /// Book a 2-dimensional data point set.
-    /// (NB. this returns a pointer rather than a reference since it will 
-    /// have to be stored in the analysis class - there's no point in forcing users to explicitly 
+    /// (NB. this returns a pointer rather than a reference since it will
+    /// have to be stored in the analysis class - there's no point in forcing users to explicitly
     /// get the pointer from a reference before they can use it!)
     AIDA::IDataPointSet* bookDataPointSet(const std::string& name, const std::string& title="",
                                           const std::string& xtitle="", const std::string& ytitle="");
 
 
     /// Book a 2-dimensional data point set with equally spaced points in a range.
-    /// (NB. this returns a pointer rather than a reference since it will 
-    /// have to be stored in the analysis class - there's no point in forcing users to explicitly 
+    /// (NB. this returns a pointer rather than a reference since it will
+    /// have to be stored in the analysis class - there's no point in forcing users to explicitly
     /// get the pointer from a reference before they can use it!)
     AIDA::IDataPointSet* bookDataPointSet(const std::string& name,
                                           const size_t npts, const double lower, const double upper,
@@ -332,7 +332,7 @@
     /// Book a 2-dimensional data point set based on the paper, dataset and x/y-axis IDs in the corresponding
     /// HepData record. The binnings (x-errors) will be obtained by reading the bundled AIDA data record file
     /// of the same filename as the analysis' name() property.
-    AIDA::IDataPointSet* bookDataPointSet(const size_t datasetId, const size_t xAxisId, 
+    AIDA::IDataPointSet* bookDataPointSet(const size_t datasetId, const size_t xAxisId,
                                           const size_t yAxisId, const std::string& title="",
                                           const std::string& xtitle="", const std::string& ytitle="");
     //@}
@@ -374,7 +374,7 @@
     /// Pointer to analysis metadata object
     shared_ptr<AnalysisInfo> _info;
 
-    
+ 
   private:
 
     /// @name Cross-section variables
@@ -383,7 +383,7 @@
     bool _gotCrossSection;
     bool _needsCrossSection;
     //@}
-    
+ 
     /// Allowed beam-type pair.
     BeamPair _beams;
 
@@ -393,12 +393,12 @@
     /// Flag to indicate whether the histogram directory is present
     bool _madeHistoDir;
 
-    /// Collection of x-axis point data to speed up many autobookings: the 
+    /// Collection of x-axis point data to speed up many autobookings: the
     /// reference data file should only be read once.
     /// @todo Reduce memory occupancy, or clear after initialisation?
     map<string, vector<DPSXPoint> > _dpsData;
 
-    /// Collection of cached bin edges to speed up many autobookings: the 
+    /// Collection of cached bin edges to speed up many autobookings: the
     /// reference data file should only be read once.
     /// @todo Reduce memory occupancy, or clear after initialisation?
     map<string, BinEdges> _histBinEdges;
@@ -420,7 +420,7 @@
   public:
     AnalysisBuilderBase() { }
     virtual ~AnalysisBuilderBase() { }
-    
+ 
     virtual Analysis* mkAnalysis() const = 0;
 
     const string name() const {

Modified: trunk/include/Rivet/AnalysisHandler.hh
==============================================================================
--- trunk/include/Rivet/AnalysisHandler.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/AnalysisHandler.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,7 +12,7 @@
 namespace Rivet {
 
 
-  /// A class which handles a number of analysis objects to be applied to 
+  /// A class which handles a number of analysis objects to be applied to
   /// generated events. An {@link Analysis}' AnalysisHandler is also responsible
   /// for handling the final writing-out of histograms.
   class AnalysisHandler {
@@ -22,7 +22,7 @@
     /// @name Standard constructors and destructors. */
     //@{
     /// The standard constructor.
-    /// @param basefilename the name of the file (no extension) where histograms 
+    /// @param basefilename the name of the file (no extension) where histograms
     ///   are to be stored.
     /// @param runname optional name of this run, prepended to AIDA data paths.
     /// @param storetype a string indicating to the AIDA analysis factory
@@ -32,11 +32,11 @@
     /// @param afac an AIDA analysis factory object. The caller must make
     ///   sure that the lifetime of the factory object exceeds the AnalysisHandler
     ///   object.
-    AnalysisHandler(AIDA::IAnalysisFactory& afac, string basefilename="Rivet", 
+    AnalysisHandler(AIDA::IAnalysisFactory& afac, string basefilename="Rivet",
                     string runname="", HistoFormat storetype=AIDAML);
 
     /// Make a Rivet handler with a set base filename and store type.
-    AnalysisHandler(string basefilename="Rivet", 
+    AnalysisHandler(string basefilename="Rivet",
                     string runname="", HistoFormat storetype=AIDAML);
 
     /// The destructor is not virtual as this class should not be inherited from.
@@ -74,12 +74,12 @@
     /// the analyses are run for a sub-contribution of the events
     /// (but of course have to be normalised to the total sum of weights)
     void setSumOfWeights(const double& sum);
-      
+   
 
     /// Get a list of the currently registered analyses' names.
     std::vector<std::string> analysisNames();
 
-    /// Add an analysis to the run list using its name. The actual Analysis 
+    /// Add an analysis to the run list using its name. The actual Analysis
     /// to be used will be obtained via AnalysisHandler::getAnalysis(string).
     /// If no matching analysis is found, no analysis is added (i.e. the
     /// null pointer is checked and discarded.
@@ -137,12 +137,12 @@
 
     /// Commit the AIDA tree to file.
     void commitData();
-    
+ 
 
     /// The AIDA tree object.
     AIDA::ITree& tree();
 
-    
+ 
     /// The AIDA histogram factory.
     AIDA::IHistogramFactory& histogramFactory();
 
@@ -155,7 +155,7 @@
     bool needCrossSection() const;
 
 
-    /// Set the cross-section for the process being generated.    
+    /// Set the cross-section for the process being generated.
     AnalysisHandler& setCrossSection(double xs);
 
 
@@ -166,7 +166,7 @@
 
     /// Run name
     std::string _runname;
-    
+ 
     /// If non-zero the number of runs to be combined into one analysis.
     int _nRun;
 

Modified: trunk/include/Rivet/AnalysisInfo.hh
==============================================================================
--- trunk/include/Rivet/AnalysisInfo.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/AnalysisInfo.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,7 +9,7 @@
 
 
   class AnalysisInfo {
-    
+ 
   public:
 
     /// Static factory method: returns null pointer if no metadata found
@@ -32,8 +32,8 @@
     /// building web pages and the analysis pages in the Rivet manual.
     //@{
     /// Get the name of the analysis. By default this is computed by
-    /// combining the results of the experiment, year and Spires ID 
-    /// metadata methods and you should only override it if there's a 
+    /// combining the results of the experiment, year and Spires ID
+    /// metadata methods and you should only override it if there's a
     /// good reason why those won't work.
     std::string name() const {
       if (!_name.empty()) return _name;
@@ -65,11 +65,11 @@
     const std::string& description() const { return _description; }
 
     /// @brief Information about the events needed as input for this analysis.
-    /// Event types, energies, kinematic cuts, particles to be considered 
+    /// Event types, energies, kinematic cuts, particles to be considered
     /// stable, etc. etc. Should be treated as a restructuredText bullet list
     /// (http://docutils.sourceforge.net/rst.html)
     const std::string& runInfo() const { return _runInfo; }
-    
+ 
     /// Experiment which performed and published this analysis.
     const std::string& experiment() const { return _experiment; }
 
@@ -80,7 +80,7 @@
     // const BeamPair& beams() const { return _beams; }
 
     /// @brief When the original experimental analysis was published.
-    /// When the refereed paper on which this is based was published, 
+    /// When the refereed paper on which this is based was published,
     /// according to SPIRES.
     const std::string& year() const { return _year; }
 

Modified: trunk/include/Rivet/AnalysisLoader.hh
==============================================================================
--- trunk/include/Rivet/AnalysisLoader.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/AnalysisLoader.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,7 +6,7 @@
 #include <map>
 #include <string>
 
-namespace Rivet {  
+namespace Rivet {
 
 
   // Forward declarations
@@ -16,15 +16,15 @@
 
   class AnalysisLoader {
   public:
-    
+ 
     /// Get all the available analyses' names.
     static vector<string> analysisNames();
-    
+ 
     /// Get an analysis by name.
     /// Warning: a name arg which matches no known analysis will return a null
     /// pointer. Check your return values before using them!
     static Analysis* getAnalysis(const string& analysisname);
-    
+ 
     /// Get all the available analyses.
     static vector<Analysis*> getAllAnalyses();
 
@@ -38,13 +38,13 @@
 
     /// Load the available analyses at runtime.
     static void _loadAnalysisPlugins();
-    
+ 
     typedef map<string, const AnalysisBuilderBase*> AnalysisBuilderMap;
     static AnalysisBuilderMap _ptrs;
-    
+ 
   };
 
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/BeamConstraint.hh
==============================================================================
--- trunk/include/Rivet/BeamConstraint.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/BeamConstraint.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -19,7 +19,7 @@
   }
 
   /// Find whether BeamPair @a pair is compatible with the template
-  /// BeamPair @a allowedpair. This assesses whether either of the 
+  /// BeamPair @a allowedpair. This assesses whether either of the
   /// two possible pairings of @a pair's constituents is compatible.
   inline bool compatible(const BeamPair& pair, const BeamPair& allowedpair) {
     bool oneToOne = compatible(pair.first, allowedpair.first);
@@ -31,13 +31,13 @@
 
 
   /// Check particle compatibility of Particle pairs
-  inline bool compatible(const ParticlePair& ppair, 
+  inline bool compatible(const ParticlePair& ppair,
                          const BeamPair& allowedpair) {
-    return compatible(make_pdgid_pair(ppair.first.pdgId(), 
+    return compatible(make_pdgid_pair(ppair.first.pdgId(),
                                       ppair.second.pdgId()), allowedpair);
   }
   /// Check particle compatibility of Particle pairs (for symmetric completeness)
-  inline bool compatible(const BeamPair& allowedpair, 
+  inline bool compatible(const BeamPair& allowedpair,
                          const ParticlePair& ppair) {
     return compatible(ppair, allowedpair);
   }

Modified: trunk/include/Rivet/Cmp.hh
==============================================================================
--- trunk/include/Rivet/Cmp.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Cmp.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,8 +9,8 @@
 
 
 namespace Rivet {
-  
-  
+
+
   /// Cmp is a helper class to be used when checking the ordering of two
   /// objects. When implicitly converted to an integer the value will be
   /// negative if the two objects used in the constructor are ordered and
@@ -22,23 +22,23 @@
   /// c2)</code> where cmp is a global function for easy creation of Cmp
   /// objects.
   template <typename T>
-  class Cmp {    
+  class Cmp {
   public:
-    
+ 
     /// @name Standard constructors etc.
     //@{
     /// The default constructor.
     Cmp(const T& t1, const T& t2)
       : _value(UNDEFINED), _objects(&t1, &t2) { }
-    
+ 
     /// The copy constructor.
     template <typename U>
     Cmp(const Cmp<U>& x)
       : _value(x), _objects(0, 0) { }
-    
+ 
     /// The destructor is not virtual since this is not intended to be a base class.
     ~Cmp() { };
-    
+ 
     /// The assignment operator.
     template <typename U>
     const Cmp<T>& operator=(const Cmp<U>& x) {
@@ -46,9 +46,9 @@
       return *this;
     }
     //@}
-    
+ 
   public:
-    
+ 
     /// Automatically convert to an enum.
     operator CmpState() const {
       _compare();
@@ -60,7 +60,7 @@
       _compare();
       return _value;
     }
-    
+ 
     /// If this state is equivalent, set this state to the state of \a c.
     template <typename U>
     const Cmp<T>& operator||(const Cmp<U>& c) const {
@@ -68,9 +68,9 @@
       if (_value == EQUIVALENT) _value = c;
       return *this;
     }
-    
+ 
   private:
-    
+ 
     /// Perform the actual comparison if necessary.
     void _compare() const {
       if (_value == UNDEFINED) {
@@ -80,17 +80,17 @@
         else _value = EQUIVALENT;
       }
     }
-    
+ 
     /// The state of this object.
     mutable CmpState _value;
-    
+ 
     /// The objects to be compared.
     pair<const T*, const T*> _objects;
-    
+ 
   };
 
-  
-  
+
+
   /// Specialization of the Cmp helper class to be used when checking the
   /// ordering of two Projection objects. When implicitly converted to an
   /// integer the value will be negative if the two objects used in the
@@ -104,25 +104,25 @@
   /// c2)</code> where cmp is a global function for easy creation of Cmp
   /// objects.
   template <>
-  class Cmp<Projection> {    
+  class Cmp<Projection> {
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
     /// The default constructor.
     Cmp(const Projection& p1, const Projection& p2)
-      : _value(UNDEFINED), _objects(&p1, &p2) 
+      : _value(UNDEFINED), _objects(&p1, &p2)
     { }
-    
+ 
     /// The copy constructor.
     template <typename U>
     Cmp(const Cmp<U>& x)
-      : _value(x), _objects(0, 0) 
+      : _value(x), _objects(0, 0)
     { }
-    
+ 
     /// The destructor is not virtual since this is not intended to be a base class.
     ~Cmp() { };
-    
+ 
     /// The assignment operator.
     template <typename U>
     const Cmp<Projection>& operator=(const Cmp<U>& x) {
@@ -130,9 +130,9 @@
       return *this;
     }
     //@}
-    
+ 
   public:
-    
+ 
     /// Automatically convert to an enum.
     operator CmpState() const {
       _compare();
@@ -140,12 +140,12 @@
     }
 
 
-    /// Automatically convert to an integer. 
+    /// Automatically convert to an integer.
     operator int() const {
       _compare();
       return _value;
     }
-    
+ 
     /// If this state is equivalent, set this state to the state of \a c.
     template <typename U>
     const Cmp<Projection>& operator||(const Cmp<U>& c) const {
@@ -153,9 +153,9 @@
       if (_value == EQUIVALENT) _value = c;
       return *this;
     }
-    
+ 
   private:
-    
+ 
     /// Perform the actual comparison if necessary.
     void _compare() const {
       if (_value == UNDEFINED) {
@@ -171,15 +171,15 @@
         }
       }
     }
-    
+ 
   private:
-    
+ 
     /// The state of this object.
     mutable CmpState _value;
-    
+ 
     /// The objects to be compared.
     pair<const Projection*, const Projection*> _objects;
-    
+ 
   };
 
 
@@ -201,23 +201,23 @@
   template <>
   class Cmp<double> {
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
     /// The default constructor.
-    Cmp(const double p1, const double p2) 
+    Cmp(const double p1, const double p2)
       : _value(UNDEFINED), _numA(p1), _numB(p2)
     { }
-    
+ 
     /// The copy constructor.
     template <typename U>
     Cmp(const Cmp<U>& x)
       : _value(x), _numA(0.0), _numB(0.0)
     { }
-    
+ 
     /// The destructor is not virtual since this is not intended to be a base class.
     ~Cmp() { }
-    
+ 
     /// The assignment operator.
     template <typename U>
     const Cmp<double>& operator=(const Cmp<U>& x) {
@@ -225,21 +225,21 @@
       return *this;
     }
     //@}
-    
+ 
   public:
-    
+ 
     /// Automatically convert to an enum.
     operator CmpState() const {
       _compare();
       return _value;
     }
 
-    /// Automatically convert to an integer. 
+    /// Automatically convert to an integer.
     operator int() const {
       _compare();
       return _value;
     }
-    
+ 
     /// If this state is equivalent, set this state to the state of \a c.
     template <typename U>
     const Cmp<double>& operator||(const Cmp<U>& c) const {
@@ -247,9 +247,9 @@
       if (_value == EQUIVALENT) _value = c;
       return *this;
     }
-    
+ 
   private:
-    
+ 
     /// Perform the actual comparison if necessary.
     void _compare() const {
       if (_value == UNDEFINED) {
@@ -258,17 +258,17 @@
         else _value = UNORDERED;
       }
     }
-    
+ 
   private:
-    
+ 
     /// The state of this object.
     mutable CmpState _value;
-    
+ 
     /// The objects to be compared.
     double _numA, _numB;
-    
+ 
   };
-  
+
 
 
   ///////////////////////////////////////////////////////////////////
@@ -319,7 +319,7 @@
     assert(parent2);
     return Cmp<Projection>(parent1->getProjection(pname), parent2->getProjection(pname));
   }
-  
+
 
 }
 

Modified: trunk/include/Rivet/Constraints.hh
==============================================================================
--- trunk/include/Rivet/Constraints.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Constraints.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,5 +1,5 @@
 // -*- C++ -*-
-#ifndef RIVET_CONSTRAINTS_HH 
+#ifndef RIVET_CONSTRAINTS_HH
 #define RIVET_CONSTRAINTS_HH 1
 
 #include "Rivet/BeamConstraint.hh"

Modified: trunk/include/Rivet/Event.hh
==============================================================================
--- trunk/include/Rivet/Event.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Event.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -39,13 +39,13 @@
   public:
 
     /// Return the generated event obtained from an external event generator.
-    const GenEvent& genEvent() const { 
-      return _genEvent; 
+    const GenEvent& genEvent() const {
+      return _genEvent;
     }
 
     /// The weight associated with the event.
-    double weight() const { 
-      return _weight; 
+    double weight() const {
+      return _weight;
     }
 
 
@@ -65,7 +65,7 @@
         const Projection& pRef = **old;
         return pcast<PROJ>(pRef);
       }
-      // Add the projection via the Projection base class (only 
+      // Add the projection via the Projection base class (only
       // possible because Event is a friend of Projection)
       Projection* pp = const_cast<Projection*>(cpp);
       pp->project(*this);
@@ -96,7 +96,7 @@
 
   };
 
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Exceptions.hh
==============================================================================
--- trunk/include/Rivet/Exceptions.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Exceptions.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,4 +1,4 @@
-#ifndef RIVET_EXCEPTIONS_HH 
+#ifndef RIVET_EXCEPTIONS_HH
 #define RIVET_EXCEPTIONS_HH
 
 #include <string>
@@ -10,7 +10,7 @@
   /// Generic runtime Rivet error.
   class Error : public std::runtime_error {
   public:
-    Error(const std::string& what) : std::runtime_error(what) {} 
+    Error(const std::string& what) : std::runtime_error(what) {}
   };
 
 
@@ -21,14 +21,14 @@
   /// Error for e.g. use of invalid bin ranges.
   class RangeError : public Error {
   public:
-    RangeError(const std::string& what) : Error(what) {} 
+    RangeError(const std::string& what) : Error(what) {}
   };
 
 
   /// @todo Clarify where this might arise!
   class LogicError : public Error {
   public:
-    LogicError(const std::string& what) : Error(what) {} 
+    LogicError(const std::string& what) : Error(what) {}
   };
 
   /// @brief Errors relating to event/bin weights
@@ -36,7 +36,7 @@
   /// weight is zero or negative.
   class WeightError : public Error {
   public:
-    WeightError(const std::string& what) : Error(what) {} 
+    WeightError(const std::string& what) : Error(what) {}
   };
 
 }

Modified: trunk/include/Rivet/HistoHandler.hh
==============================================================================
--- trunk/include/Rivet/HistoHandler.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/HistoHandler.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -25,7 +25,7 @@
   ///
   class HistoHandler {
   private:
-    
+ 
     /// @name Construction. */
     //@{
     /// The standard constructor.

Modified: trunk/include/Rivet/Jet.hh
==============================================================================
--- trunk/include/Rivet/Jet.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Jet.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -55,12 +55,12 @@
     vector<Particle>& particles() {
       return _fullParticles;
     }
-    
+ 
     /// Get the Rivet::Particles (full information) in this jet (const version)
     const vector<Particle>& particles() const {
       return _fullParticles;
     }
-    
+ 
     /// Number of particles (tracks) in this jet.
     size_t size() const {
       return _particles.size();
@@ -74,7 +74,7 @@
 
     /// Add a particle/track to this jet.
     Jet& addParticle(const Particle& particle);
-    
+ 
     /// Check whether this jet contains a particular particle.
     bool containsParticle(const Particle& particle) const;
 
@@ -89,7 +89,7 @@
 
     /// Check whether this jet contains a bottom-flavoured hadron.
     bool containsBottom() const;
- 
+
     /// Reset this jet as empty.
     Jet& clear();
 
@@ -109,7 +109,7 @@
 
     /// Get equivalent single momentum four-vector. (caches)
     const FourMomentum& momentum() const;
-    
+ 
     // /// Get equivalent single momentum four-vector. (caches)
     // FourMomentum& momentum();
 
@@ -122,7 +122,7 @@
 
     /// Get the energy carried in this jet by neutral particles.
     double neutralEnergy() const;
-    
+ 
     /// Get the energy carried in this jet by hadrons.
     double hadronicEnergy() const;
 
@@ -162,10 +162,10 @@
     mutable double _ptWeightedPhi, _ptWeightedEta;
     mutable bool _okPtWeightedPhi, _okPtWeightedEta;
 
-    /// Cached effective jet 4-vector 
+    /// Cached effective jet 4-vector
     mutable FourMomentum _momentum;
     mutable bool _okMomentum;
-    
+ 
   };
 
 

Modified: trunk/include/Rivet/Makefile.am
==============================================================================
--- trunk/include/Rivet/Makefile.am	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Makefile.am	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,7 +1,7 @@
 ## Internal headers - not to be installed
-nobase_dist_noinst_HEADERS = 
+nobase_dist_noinst_HEADERS =
 ## Public headers - to be installed
-nobase_pkginclude_HEADERS = 
+nobase_pkginclude_HEADERS =
 
 
 ## Rivet interface
@@ -76,7 +76,7 @@
   Projections/VetoedFinalState.hh \
   Projections/VisibleFinalState.hh \
   Projections/WFinder.hh \
-  Projections/ZFinder.hh 
+  Projections/ZFinder.hh
 
 
 ## Tools
@@ -119,4 +119,4 @@
   Math/eigen/matrix.h \
   Math/eigen/vectorbase.h \
   Math/eigen/projective.h \
-  Math/eigen/matrixbase.h 
+  Math/eigen/matrixbase.h

Modified: trunk/include/Rivet/Math/Constants.hh
==============================================================================
--- trunk/include/Rivet/Math/Constants.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/Constants.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,13 +15,13 @@
   static const double     pi2 = pi*pi;
 
   //
-  // 
+  //
   //
   static const double Avogadro = 6.0221367e+23/mole;
 
   //
   // c   = 299.792458 mm/ns
-  // c^2 = 898.7404 (mm/ns)^2 
+  // c^2 = 898.7404 (mm/ns)^2
   //
   static const double c_light   = 2.99792458e+8 * m/s;
   static const double c_squared = c_light * c_light;

Modified: trunk/include/Rivet/Math/MathHeader.hh
==============================================================================
--- trunk/include/Rivet/Math/MathHeader.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/MathHeader.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,7 +12,7 @@
 #include <algorithm>
 
 namespace Rivet {
-  
+
   using std::string;
   using std::ostream;
   using std::ostringstream;
@@ -44,7 +44,7 @@
 
   /// Enum for range of \f$ \phi \f$ to be mapped into
   enum PhiMapping { MINUSPI_PLUSPI, ZERO_2PI };
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Math/MathUtils.hh
==============================================================================
--- trunk/include/Rivet/Math/MathUtils.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/MathUtils.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,7 +11,7 @@
   /// @name Number comparisons etc.
   //@{
 
-  /// Compare a floating point number to zero with a degree 
+  /// Compare a floating point number to zero with a degree
   /// of fuzziness expressed by the absolute @a tolerance parameter.
   inline bool isZero(double val, double tolerance=1E-8) {
     return (fabs(val) < tolerance);
@@ -19,7 +19,7 @@
 
   /// Compare an integral-type number to zero. Since there is no
   /// risk of floating point error, this function just exists in
-  /// case @c isZero is accidentally used on an integer type, to avoid 
+  /// case @c isZero is accidentally used on an integer type, to avoid
   /// implicit type conversion. The @a tolerance parameter is ignored.
   inline bool isZero(long val, double tolerance=1E-8) {
     return val == 0;
@@ -44,7 +44,7 @@
     return (val > 0) ? PLUS : MINUS;
   }
 
-  /// Compare two floating point numbers with a degree of fuzziness 
+  /// Compare two floating point numbers with a degree of fuzziness
   /// expressed by the fractional @a tolerance parameter.
   inline bool fuzzyEquals(double a, double b, double tolerance=1E-5) {
     const double absavg = fabs(a + b)/2.0;
@@ -54,9 +54,9 @@
   }
 
   /// Compare two integral-type numbers with a degree of fuzziness.
-  /// Since there is no risk of floating point error with integral types, 
-  /// this function just exists in case @c fuzzyEquals is accidentally 
-  /// used on an integer type, to avoid implicit type conversion. The @a 
+  /// Since there is no risk of floating point error with integral types,
+  /// this function just exists in case @c fuzzyEquals is accidentally
+  /// used on an integer type, to avoid implicit type conversion. The @a
   /// tolerance parameter is ignored, even if it would have an
   /// absolute magnitude greater than 1.
   inline bool fuzzyEquals(long a, long b, double tolerance=1E-5) {
@@ -71,7 +71,7 @@
   /// Determine if @a value is in the range @a low to @a high, with boundary
   /// types defined by @a lowbound and @a highbound.
   /// @todo Optimise to one-line at compile time?
-  inline bool inRange(double value, double low, double high, 
+  inline bool inRange(double value, double low, double high,
                       RangeBoundary lowbound=OPEN, RangeBoundary highbound=OPEN) {
     if (lowbound == OPEN && highbound == OPEN) {
       return (value > low && value < high);
@@ -105,9 +105,9 @@
     }
     return mean/sample.size();
   }
-  
-  
-  /// Calculate the covariance (variance) between two samples  
+
+
+  /// Calculate the covariance (variance) between two samples
   inline double covariance(const vector<int>& sample1, const vector<int>& sample2) {
     double mean1 = mean(sample1);
     double mean2 = mean(sample2);
@@ -120,8 +120,8 @@
     if (N > 1) return cov/(N-1);
     else return 0.0;
   }
-  
-  
+
+
   /// Calculate the correlation strength between two samples
   inline double correlation(const vector<int>& sample1, const vector<int>& sample2) {
     const double cov = covariance(sample1, sample2);
@@ -181,7 +181,7 @@
   /// @name Phase space measure helpers
   //@{
 
-  /// Calculate the difference between two angles in radians, 
+  /// Calculate the difference between two angles in radians,
   /// returning in the range [0, PI].
   inline double deltaPhi(double phi1, double phi2) {
     return mapAngle0ToPi(phi1 - phi2);

Modified: trunk/include/Rivet/Math/MatrixDiag.hh
==============================================================================
--- trunk/include/Rivet/Math/MatrixDiag.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/MatrixDiag.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -96,10 +96,10 @@
 
 /// Comparison functor for "eigen-pairs".
 template <size_t N>
-struct EigenPairCmp : 
-  public std::binary_function<const typename EigenSystem<N>::EigenPair&, 
+struct EigenPairCmp :
+  public std::binary_function<const typename EigenSystem<N>::EigenPair&,
                               const typename EigenSystem<N>::EigenPair&, bool> {
-  bool operator()(const typename EigenSystem<N>::EigenPair& a, 
+  bool operator()(const typename EigenSystem<N>::EigenPair& a,
                   const typename EigenSystem<N>::EigenPair& b) {
     return a.first < b.first;
   }
@@ -120,13 +120,13 @@
     }
   }
 
-  // Use GSL diagonalization.  
+  // Use GSL diagonalization.
   gsl_matrix* vecs = gsl_matrix_alloc(N, N);
   gsl_vector* vals = gsl_vector_alloc(N);
   gsl_eigen_symmv_workspace* workspace = gsl_eigen_symmv_alloc(N);
   gsl_eigen_symmv(A, vals, vecs, workspace);
   gsl_eigen_symmv_sort(vals, vecs, GSL_EIGEN_SORT_VAL_DESC);
-  
+
   // Build the vector of "eigen-pairs".
   typename EigenSystem<N>::EigenPairs eigensolns;
   for (size_t i = 0; i < N; ++i) {

Modified: trunk/include/Rivet/Math/MatrixN.hh
==============================================================================
--- trunk/include/Rivet/Math/MatrixN.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/MatrixN.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -49,15 +49,15 @@
       for (size_t i = 0; i < N; ++i) {
         rtn.set(i, i, diag[i]);
       }
-      return rtn;    
+      return rtn;
     }
 
-    static Matrix<N> mkIdentity() {    
+    static Matrix<N> mkIdentity() {
       Matrix<N> rtn;
       for (size_t i = 0; i < N; ++i) {
         rtn.set(i, i, 1);
       }
-      return rtn;    
+      return rtn;
     }
 
 

Modified: trunk/include/Rivet/Math/Units.hh
==============================================================================
--- trunk/include/Rivet/Math/Units.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/Units.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -5,28 +5,28 @@
 
 namespace Rivet {
 
-  // 
+  //
   // Length [L]
   //
-  static const double millimeter  = 1.;                        
+  static const double millimeter  = 1.;
   static const double millimeter2 = millimeter*millimeter;
   static const double millimeter3 = millimeter*millimeter*millimeter;
 
-  static const double centimeter  = 10.*millimeter;   
+  static const double centimeter  = 10.*millimeter;
   static const double centimeter2 = centimeter*centimeter;
   static const double centimeter3 = centimeter*centimeter*centimeter;
 
-  static const double meter  = 1000.*millimeter;                  
+  static const double meter  = 1000.*millimeter;
   static const double meter2 = meter*meter;
   static const double meter3 = meter*meter*meter;
 
-  static const double kilometer = 1000.*meter;                   
+  static const double kilometer = 1000.*meter;
   static const double kilometer2 = kilometer*kilometer;
   static const double kilometer3 = kilometer*kilometer*kilometer;
 
   static const double parsec = 3.0856775807e+16*meter;
 
-  static const double micrometer = 1.e-6 *meter;             
+  static const double micrometer = 1.e-6 *meter;
   static const double nanometer  = 1.e-9 *meter;
   static const double angstrom   = 1.e-10*meter;
   static const double picometer  = 1.e-12*meter;
@@ -35,19 +35,19 @@
   static const double fermi      = femtometer;
 
   // symbols
-  static const double mm  = millimeter;                        
+  static const double mm  = millimeter;
   static const double mm2 = millimeter2;
   static const double mm3 = millimeter3;
 
-  static const double cm  = centimeter;   
+  static const double cm  = centimeter;
   static const double cm2 = centimeter2;
   static const double cm3 = centimeter3;
 
-  static const double m  = meter;                  
+  static const double m  = meter;
   static const double m2 = meter2;
   static const double m3 = meter3;
 
-  static const double km  = kilometer;                   
+  static const double km  = kilometer;
   static const double km2 = kilometer2;
   static const double km3 = kilometer3;
 
@@ -67,7 +67,7 @@
   //
   // Angle
   //
-  static const double radian      = 1.;                  
+  static const double radian      = 1.;
   static const double milliradian = 1.e-3*radian;
   static const double degree = (3.14159265358979323846/180.0)*radian;
   static const double steradian = 1.;
@@ -132,7 +132,7 @@
   //
   // Mass [E][T^2][L^-2]
   //
-  static const double  kilogram = joule*second*second/(meter*meter);   
+  static const double  kilogram = joule*second*second/(meter*meter);
   static const double      gram = 1.e-3*kilogram;
   static const double milligram = 1.e-3*gram;
 
@@ -154,7 +154,7 @@
   //
   // Pressure [E][L^-3]
   //
-  #define pascal hep_pascal                          // a trick to avoid warnings 
+  #define pascal hep_pascal                          // a trick to avoid warnings
   static const double hep_pascal = newton/m2;	   // pascal = 6.24150 e+3 * MeV/mm3
   static const double bar        = 100000*pascal; // bar    = 6.24150 e+8 * MeV/mm3
   static const double atmosphere = 101325*pascal; // atm    = 6.32420 e+8 * MeV/mm3

Modified: trunk/include/Rivet/Math/Vector3.hh
==============================================================================
--- trunk/include/Rivet/Math/Vector3.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/Vector3.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -68,18 +68,18 @@
     Vector3& setY(double y) { set(1, y); return *this; }
     Vector3& setZ(double z) { set(2, z); return *this; }
 
-    double dot(const Vector3& v) const { 
+    double dot(const Vector3& v) const {
       return _vec.dot(v._vec);
     }
 
-    Vector3 cross(const Vector3& v) const { 
+    Vector3 cross(const Vector3& v) const {
       Vector3 result;
       result._vec = _vec.cross(v._vec);
       return result;
     }
 
     double angle(const Vector3& v) const {
-      double localDotOther = unit().dot(v.unit());      
+      double localDotOther = unit().dot(v.unit());
       if(Rivet::isZero(localDotOther - 1.0)) return 0.0;
       return acos( localDotOther );
     }
@@ -139,7 +139,7 @@
       case ZERO_2PI:
         if (value >= 0) {
           assert(value >= 0 && value < 2*PI);
-          return value; 
+          return value;
         } else if (Rivet::isZero(value)) {
           value = 0.0;
           return value;
@@ -149,10 +149,10 @@
           return value;
         }
       default:
-        throw std::runtime_error("The specified phi mapping scheme is not yet implemented"); 
+        throw std::runtime_error("The specified phi mapping scheme is not yet implemented");
       }
     }
-    
+ 
     /// Synonym for azimuthalAngle.
     double phi(const PhiMapping mapping = ZERO_2PI) const {
       return azimuthalAngle(mapping);

Modified: trunk/include/Rivet/Math/Vector4.hh
==============================================================================
--- trunk/include/Rivet/Math/Vector4.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/Vector4.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -33,7 +33,7 @@
       this->setZ(other.z());
     }
 
-    FourVector(const Vector<4>& other) 
+    FourVector(const Vector<4>& other)
     : Vector<4>(other) { }
 
     FourVector(const double t, const double x, const double y, const double z) {
@@ -317,7 +317,7 @@
       this->setPz(other.z());
     }
 
-    FourMomentum(const Vector<4>& other) 
+    FourMomentum(const Vector<4>& other)
       : FourVector(other) { }
 
     FourMomentum(const double E, const double px, const double py, const double pz) {
@@ -358,18 +358,18 @@
     FourMomentum& setPz(double pz) { setZ(pz); return *this; }
 
     /// Get squared mass \f$ m^2 = E^2 - p^2 \f$ (the Lorentz self-invariant).
-    double mass2() const { 
+    double mass2() const {
       return invariant();
     }
 
     /// Get mass \f$ m = \sqrt{E^2 - p^2} \f$ (the Lorentz self-invariant).
-    double mass() const { 
+    double mass() const {
       assert(Rivet::isZero(mass2()) || mass2() > 0);
-      return sqrt(mass2()); 
+      return sqrt(mass2());
     }
 
     /// Calculate rapidity.
-    double rapidity() const { 
+    double rapidity() const {
       return 0.5 * std::log( (E() + pz()) / (E() - pz()) );
     }
 
@@ -394,7 +394,7 @@
     }
 
     /// Calculate boost vector (in units of \f$ \beta \f$).
-    Vector3 boostVector() const { 
+    Vector3 boostVector() const {
       // const Vector3 p3 = vector3();
       // const double m2 = mass2();
       // if (Rivet::isZero(m2)) return p3.unit();
@@ -406,45 +406,45 @@
       //   return beta * p3.unit();
       // }
       /// @todo Be careful about c=1 convention...
-      return Vector3(px()/E(), py()/E(), pz()/E()); 
+      return Vector3(px()/E(), py()/E(), pz()/E());
     }
 
     /// struct for sorting by increasing energy
-    
+ 
     struct byEAscending{
       bool operator()(const FourMomentum &left, const FourMomentum &right) const{
         double pt2left = left.E();
         double pt2right = right.E();
         return pt2left < pt2right;
       }
-      
+   
       bool operator()(const FourMomentum *left, const FourMomentum *right) const{
         return (*this)(left, right);
       }
     };
-    
+ 
     /// struct for sorting by decreasing energy
-    
+ 
     struct byEDescending{
       bool operator()(const FourMomentum &left, const FourMomentum &right) const{
         return byEAscending()(right, left);
       }
-      
+   
       bool operator()(const FourMomentum *left, const FourVector *right) const{
         return (*this)(left, right);
       }
     };
-    
+ 
   };
 
 
   /// Get squared mass \f$ m^2 = E^2 - p^2 \f$ (the Lorentz self-invariant) of a momentum 4-vector.
-  inline double mass2(const FourMomentum& v) { 
+  inline double mass2(const FourMomentum& v) {
     return v.mass2();
   }
 
   /// Get mass \f$ m = \sqrt{E^2 - p^2} \f$ (the Lorentz self-invariant) of a momentum 4-vector.
-  inline double mass(const FourMomentum& v) { 
+  inline double mass(const FourMomentum& v) {
     return v.mass();
   }
 
@@ -488,7 +488,7 @@
   /// be chosen via the optional scheme parameter, which is discouraged in this
   /// case since @c RAPIDITY is only a valid option for vectors whose type is
   /// really the FourMomentum derived class.
-  inline double deltaR(const FourVector& a, const FourVector& b, 
+  inline double deltaR(const FourVector& a, const FourVector& b,
                        DeltaRScheme scheme = PSEUDORAPIDITY) {
     switch (scheme) {
     case PSEUDORAPIDITY :
@@ -503,13 +503,13 @@
         }
         return deltaR(*ma, *mb, scheme);
       }
-    default: 
+    default:
       throw std::runtime_error("The specified deltaR scheme is not yet implemented");
     }
   }
 
 
-  inline double deltaR(const FourVector& v, 
+  inline double deltaR(const FourVector& v,
                        double eta2, double phi2,
                        DeltaRScheme scheme = PSEUDORAPIDITY) {
     switch (scheme) {
@@ -524,14 +524,14 @@
         }
         return deltaR(*mv, eta2, phi2, scheme);
       }
-    default: 
+    default:
       throw std::runtime_error("The specified deltaR scheme is not yet implemented");
     }
   }
 
 
   inline double deltaR(double eta1, double phi1,
-                       const FourVector& v, 
+                       const FourVector& v,
                        DeltaRScheme scheme = PSEUDORAPIDITY) {
     switch (scheme) {
     case PSEUDORAPIDITY :
@@ -545,7 +545,7 @@
         }
         return deltaR(eta1, phi1, *mv, scheme);
       }
-    default: 
+    default:
       throw std::runtime_error("The specified deltaR scheme is not yet implemented");
     }
   }
@@ -556,7 +556,7 @@
   /// as to whether the pseudorapidity (a purely geometric concept) or the
   /// rapidity (a relativistic energy-momentum quantity) is to be used: this can
   /// be chosen via the optional scheme parameter.
-  inline double deltaR(const FourMomentum& a, const FourMomentum& b, 
+  inline double deltaR(const FourMomentum& a, const FourMomentum& b,
                        DeltaRScheme scheme = PSEUDORAPIDITY) {
     switch (scheme) {
     case PSEUDORAPIDITY:
@@ -593,12 +593,12 @@
     default:
       throw std::runtime_error("The specified deltaR scheme is not yet implemented");
     }
-  }  
+  }
 
 
   //////////////////////////////////////////////////////
-  
-  
+
+
   /// Render a 4-vector as a string.
   inline const string toString(const FourVector& lv) {
     ostringstream out;
@@ -609,7 +609,7 @@
         << ")";
     return out.str();
   }
-  
+
   /// Write a 4-vector to an ostream.
   inline std::ostream& operator<<(std::ostream& out, const FourVector& lv) {
     out << toString(lv);

Modified: trunk/include/Rivet/Math/VectorN.hh
==============================================================================
--- trunk/include/Rivet/Math/VectorN.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/VectorN.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -27,7 +27,7 @@
   public:
     Vector() { _vec.loadZero(); }
 
-    Vector(const Vector<N>& other) 
+    Vector(const Vector<N>& other)
       : _vec(other._vec) { }
 
     const double& get(const size_t index) const {

Modified: trunk/include/Rivet/Math/eigen/matrixbase.h
==============================================================================
--- trunk/include/Rivet/Math/eigen/matrixbase.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/eigen/matrixbase.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -764,7 +764,7 @@
     }
 
     /** Tests whether *this is approximately equal to the zero matrix.
-      * 
+      *
       * Equivalent to isNegligible(1). In other words, returns true if
       * all entries of *this are approximately zero, in the sense that
       * they have absolute value smaller than epsilon.

Modified: trunk/include/Rivet/Math/eigen/projective.h
==============================================================================
--- trunk/include/Rivet/Math/eigen/projective.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/eigen/projective.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -243,7 +243,7 @@
     MatrixP & operator *= ( const MatrixP & other )
     { m_mat *= other.m_mat; return *this; }
 
-    /** Matrix-matrix product. Calls Matrix::operator*(). For better performance use 
+    /** Matrix-matrix product. Calls Matrix::operator*(). For better performance use
       * multiply(const MatrixP &, MatrixP *) const instead.
       *
       * \sa multiply(const MatrixP &, MatrixP *) const

Modified: trunk/include/Rivet/Math/eigen/vectorbase.h
==============================================================================
--- trunk/include/Rivet/Math/eigen/vectorbase.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Math/eigen/vectorbase.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -598,7 +598,7 @@
     }
 
     /** Tests whether *this is approximately equal to the zero matrix.
-      * 
+      *
       * Equivalent to isNegligible(1). In other words, returns true if
       * all entries of *this are approximately zero, in the sense that
       * they have absolute value smaller than epsilon.

Modified: trunk/include/Rivet/Projection.hh
==============================================================================
--- trunk/include/Rivet/Projection.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projection.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -24,17 +24,17 @@
   /// The main virtual functions to be overridden by concrete sub-classes
   /// are project(const Event &) and compare(const Projection &).
   class Projection : public ProjectionApplier {
-    
+ 
   public:
-    
+ 
     /// Event is a friend.
     friend class Event;
-    
+ 
     /// The Cmp specialization for Projection is a friend.
     friend class Cmp<Projection>;
-    
+ 
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
     /// The default constructor.
@@ -42,14 +42,14 @@
 
     /// Clone on the heap.
     virtual const Projection* clone() const = 0;
-    
+ 
     /// The destructor.
     virtual ~Projection();
     //@}
-    
+ 
 
   public:
-    
+ 
     /// Take the information available in the Event and make the
     /// calculations necessary to obtain the projection. Note that this
     /// function must never be called except inside the
@@ -57,7 +57,7 @@
     virtual void project(const Event& e) = 0;
 
 
-  protected:    
+  protected:
 
     /// This function is used to define a unique ordering between
     /// different Projection objects of the same class. If this is
@@ -79,7 +79,7 @@
     /// whether this should be ordered before or after \a p, or if it is
     /// equivalent with \a p.
     virtual int compare(const Projection& p) const = 0;
-    
+ 
   public:
 
     /// Determine whether this object should be ordered before the object
@@ -88,7 +88,7 @@
     /// objects is used. Otherwise, if the objects are of the same class,
     /// the virtual compare(const Projection &) will be returned.
     bool before(const Projection& p) const;
-    
+ 
     /// Return the BeamConstraints for this projection, not including
     /// recursion. Derived classes should ensure that all contained projections
     /// are registered in the @a _projections set for the beam constraint
@@ -106,8 +106,8 @@
       _beamPairs.insert(BeamPair(beam1, beam2));
       return *this;
     }
-    
-    
+ 
+ 
     /// Get a Log object based on the getName() property of the calling projection object.
     Log& getLog() const {
       string logname = "Rivet.Projection." + name();
@@ -129,7 +129,7 @@
     /// Shortcut to make a named Cmp<Projection> comparison with the @c *this
     /// object automatically passed as one of the parent projections.
     Cmp<Projection> mkPCmp(const Projection& otherparent, const std::string& pname) const;
-   
+
 
   private:
 
@@ -139,7 +139,7 @@
 
     /// Beam-type constraint.
     set<BeamPair> _beamPairs;
-    
+ 
   };
 
 
@@ -147,7 +147,7 @@
 
 
 /// Define "less" operator for Projection* containers in terms of the Projection::before virtual method.
-inline bool std::less<const Rivet::Projection *>::operator()(const Rivet::Projection* x, 
+inline bool std::less<const Rivet::Projection *>::operator()(const Rivet::Projection* x,
                                                              const Rivet::Projection* y) const {
   return x->before(*y);
 }

Modified: trunk/include/Rivet/ProjectionApplier.hh
==============================================================================
--- trunk/include/Rivet/ProjectionApplier.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/ProjectionApplier.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -61,7 +61,7 @@
     //@}
 
 
-    /// @name Projection applying functions 
+    /// @name Projection applying functions
     //@{
     /// Apply the supplied projection on @a event.
     template <typename PROJ>
@@ -83,7 +83,7 @@
       return pcast<PROJ>(_applyProjection(evt, name));
     }
     //@}
-   
+
 
   protected:
 
@@ -101,12 +101,12 @@
   protected:
 
 
-    /// @name Projection registration functions 
+    /// @name Projection registration functions
     //@{
 
     /// Register a contained projection. The type of the argument is used to
-    /// instantiate a new projection internally: this new object is applied to 
-    /// events rather than the argument object. Hence you are advised to only use 
+    /// instantiate a new projection internally: this new object is applied to
+    /// events rather than the argument object. Hence you are advised to only use
     /// locally-scoped Projection objects in your Projection and Analysis
     /// constructors, and to avoid polymorphism (e.g. handling @c ConcreteProjection
     /// via a pointer or reference to type @c Projection) since this will screw
@@ -122,14 +122,14 @@
     const Projection& _addProjection(const Projection& proj, const std::string& name);
 
     //@}
-    
-    
+ 
+ 
   private:
-    
+ 
     /// Non-templated version of string-based applyProjection, to work around
     /// header dependency issue.
     const Projection& _applyProjection(const Event& evt, const std::string& name) const;
-    
+ 
     /// Non-templated version of proj-based applyProjection, to work around
     /// header dependency issue.
     const Projection& _applyProjection(const Event& evt, const Projection& proj) const;
@@ -139,13 +139,13 @@
 
     /// Flag to forbid projection registration in analyses until the init phase
     bool _allowProjReg;
-    
-    
+ 
+ 
   private:
-    
+ 
     /// Pointer to projection handler.
     ProjectionHandler* _projhandler;
-    
+ 
   };
 
 }

Modified: trunk/include/Rivet/ProjectionHandler.hh
==============================================================================
--- trunk/include/Rivet/ProjectionHandler.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/ProjectionHandler.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -33,7 +33,7 @@
   /// as allowing analysis classes to contain fewer data members (since
   /// projections are now better accessed by name than by storing a data member
   /// reference or pointer).
-  /// 
+  ///
   /// The core of the ProjectionHandler design is that it is a singleton class,
   /// essentially a wrapper around a map of @c Projection*, indexed by a hash of
   /// the registering object and its local name for the registered projection.
@@ -83,7 +83,7 @@
 
 
   private:
-    
+ 
     /// @name Construction. */
     //@{
     /// The standard constructor.
@@ -106,13 +106,13 @@
     /// @name Projection registration
     //@{
     /// Attach and retrieve a projection as a reference.
-    const Projection& registerProjection(const ProjectionApplier& parent, 
-                                         const Projection& proj, 
+    const Projection& registerProjection(const ProjectionApplier& parent,
+                                         const Projection& proj,
                                          const string& name);
 
     /// Attach and retrieve a projection as a pointer.
-    const Projection* registerProjection(const ProjectionApplier& parent, 
-                                         const Projection* proj, 
+    const Projection* registerProjection(const ProjectionApplier& parent,
+                                         const Projection* proj,
                                          const string& name);
     //@}
 
@@ -126,20 +126,20 @@
     /// @returns 0 if no equivalent projection found
     const Projection* _getEquiv(const Projection& proj) const;
 
-    /// Make a clone of proj, copying across child references from the original 
-    const Projection* _clone(const ProjectionApplier& parent, 
+    /// Make a clone of proj, copying across child references from the original
+    const Projection* _clone(const ProjectionApplier& parent,
                              const Projection& proj);
 
     /// Internal function to do the registering
-    const Projection* _register(const ProjectionApplier& parent, 
+    const Projection* _register(const ProjectionApplier& parent,
                                 const Projection& proj,
                                 const string& name);
 
     /// Get a string dump of the current ProjHandler structure
     string _getStatus() const;
-    
+ 
     /// Check that this parent projection doesn't already use this name
-    bool _checkDuplicate(const ProjectionApplier& parent, 
+    bool _checkDuplicate(const ProjectionApplier& parent,
                          const Projection& proj,
                          const string& name) const;
 
@@ -157,7 +157,7 @@
     /// problems and there is no need to do so.
     const Projection& getProjection(const ProjectionApplier& parent,
                                     const string& name) const;
-    
+ 
     /// Get child projections for the given parent. By default this will just
     /// return the projections directly contained by the @a parent, but the @a
     /// depth argument can be changed to do a deep retrieval, which will recurse

Modified: trunk/include/Rivet/Projections/AxesDefinition.hh
==============================================================================
--- trunk/include/Rivet/Projections/AxesDefinition.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/AxesDefinition.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -34,7 +34,7 @@
     ///@}
 
   };
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/Beam.hh
==============================================================================
--- trunk/include/Rivet/Projections/Beam.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/Beam.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,11 +11,11 @@
 
   /// Project out the incoming beams
   class Beam : public Projection {
-    
+ 
   public:
-    
+ 
     /// The default constructor.
-    Beam() { 
+    Beam() {
       setName("Beam");
     }
 
@@ -34,7 +34,7 @@
 
     /// The pair of beam particle PDG codes in the current collision.
     const BeamPair beamIDs() const {
-      return make_pair(beams().first.pdgId(), 
+      return make_pair(beams().first.pdgId(),
                        beams().second.pdgId());
     }
 
@@ -57,7 +57,7 @@
 
 
   private:
-    /// The beam particles in the current collision in GenEvent 
+    /// The beam particles in the current collision in GenEvent
     ParticlePair _theBeams;
 
   };

Modified: trunk/include/Rivet/Projections/ChargedFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/ChargedFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/ChargedFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,11 +16,11 @@
   class ChargedFinalState : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
     ChargedFinalState(const FinalState& fsp);
-    
+ 
     ChargedFinalState(double mineta = -MAXRAPIDITY,
                       double maxeta =  MAXRAPIDITY,
                       double minpt  =  0.0*GeV);
@@ -33,15 +33,15 @@
 
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/ChargedLeptons.hh
==============================================================================
--- trunk/include/Rivet/Projections/ChargedLeptons.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/ChargedLeptons.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,43 +13,43 @@
   /// Project out charged final-state leptons (i.e. electrons and muons, unless
   /// you set taus stable!)
   class ChargedLeptons : public Projection {
-    
+ 
   public:
-    
+ 
     /// Constructor
     ChargedLeptons(const FinalState& fsp)
-    { 
+    {
       setName("ChargedLeptons");
       addProjection(ChargedFinalState(fsp), "ChFS");
     }
-    
+ 
     /// Clone on the heap.
     virtual const Projection* clone() const {
       return new ChargedLeptons(*this);
     }
 
   protected:
-    
+ 
     /// Apply the projection to the event.
     void project(const Event& evt);
-    
+ 
     /// Compare projections.
     int compare(const Projection& other) const;
-    
+ 
   public:
-    
+ 
     /// Access the projected leptons.
-    const ParticleVector& chargedLeptons() const { 
-      return _theChargedLeptons; 
+    const ParticleVector& chargedLeptons() const {
+      return _theChargedLeptons;
     }
-    
+ 
   private:
 
     /// The leptons
     ParticleVector _theChargedLeptons;
-        
+     
   };
-  
+
 
 }
 

Modified: trunk/include/Rivet/Projections/ClusteredPhotons.hh
==============================================================================
--- trunk/include/Rivet/Projections/ClusteredPhotons.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/ClusteredPhotons.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,12 +18,12 @@
   class ClusteredPhotons : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
     /// Constructor with the two final states, and the maximum separation in dR
     /// for clustered photons
-    ClusteredPhotons(const FinalState& fs, const FinalState& signal, double dRmax) 
+    ClusteredPhotons(const FinalState& fs, const FinalState& signal, double dRmax)
       : _dRmax(dRmax)
     {
       setName("ClusteredPhotons");
@@ -39,15 +39,15 @@
       return new ClusteredPhotons(*this);
     }
     //@}
-    
+ 
 
   public:
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
@@ -56,10 +56,10 @@
 
     /// maximum cone radius to find photons in
     double _dRmax;
-    
+ 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/DISKinematics.hh
==============================================================================
--- trunk/include/Rivet/Projections/DISKinematics.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/DISKinematics.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,10 +16,10 @@
   class DISKinematics : public Projection {
 
   public:
-        
+     
     /// The default constructor.
-    DISKinematics() 
-      : _theQ2(-1.0), _theW2(-1.0), _theX(-1.0), _theY(-1.0), _theS(-1.0) 
+    DISKinematics()
+      : _theQ2(-1.0), _theW2(-1.0), _theX(-1.0), _theY(-1.0), _theS(-1.0)
     {
       setName("DISKinematics");
       //addBeamPair(ANY, hadid);
@@ -30,11 +30,11 @@
     /// Clone on the heap.
     virtual const Projection* clone() const {
       return new DISKinematics(*this);
-    }    
+    }
 
-    
+ 
   protected:
-    
+ 
     /// Perform the projection operation on the supplied event.
     virtual void project(const Event& e);
 
@@ -63,7 +63,7 @@
 
     /// The LorentzRotation needed to boost a particle to the hadronic CM frame.
     const LorentzTransform& boostHCM() const {
-      return _hcm; 
+      return _hcm;
     }
 
     /// The LorentzRotation needed to boost a particle to the hadronic Breit frame.
@@ -75,7 +75,7 @@
     const Particle& beamHadron() const {
       return _inHadron;
     }
-    
+ 
   private:
 
     /// The \f$Q^2\f$.
@@ -94,7 +94,7 @@
     double _theS;
 
     Particle _inHadron;
-    
+ 
     /// The LorentzRotation needed to boost a particle to the hadronic CM frame.
     LorentzTransform _hcm;
 

Modified: trunk/include/Rivet/Projections/DISLepton.hh
==============================================================================
--- trunk/include/Rivet/Projections/DISLepton.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/DISLepton.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,58 +11,58 @@
 
 
   /// This class projects out the incoming and outgoing leptons in a DIS
-  /// event. 
+  /// event.
   class DISLepton : public Projection {
-    
+ 
   public:
-    
+ 
     /// @name Constructors.
     //@{
-    
+ 
     DISLepton(){
       setName("DISLepton");
       addProjection(Beam(), "Beam");
       addProjection(FinalState(), "FS");
     }
-    
+ 
     /// Clone on the heap.
     virtual const Projection* clone() const {
       return new DISLepton(*this);
     }
     //@}
-    
-    
+ 
+ 
   protected:
-    
+ 
     /// Perform the projection operation on the supplied event.
     virtual void project(const Event& e);
-    
+ 
     /// Compare with other projections.
     virtual int compare(const Projection& p) const;
-    
+ 
   public:
-    
+ 
     /// The incoming lepton.
     const Particle& in() const { return _incoming; }
-    
+ 
     /// The outgoing lepton.
     const Particle& out() const { return _outgoing; }
-    
+ 
     const double &pzSign() const { return _sign; }
-    
+ 
   private:
-    
+ 
     /// The incoming lepton.
     Particle _incoming;
-    
+ 
     /// The outgoing lepton.
     Particle _outgoing;
-        
+     
     /// The sign of the PZ of the incoming lepton
     double _sign;
-    
+ 
   };
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/FinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/FinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/FinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,9 +11,9 @@
 
 
   /// Project out all final-state particles in an event.
-  class FinalState : public Projection {    
+  class FinalState : public Projection {
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
     /// The default constructor. May specify the minimum and maximum
@@ -21,7 +21,7 @@
     FinalState(double mineta = -MAXRAPIDITY,
                double maxeta =  MAXRAPIDITY,
                double minpt  =  0.0*GeV);
-    
+ 
     /// A constructor which allows to specify multiple eta ranges
     /// and the min \f$ p_T \f$ (in GeV).
     FinalState(const vector<pair<double, double> >& etaRanges,
@@ -71,40 +71,40 @@
   public:
 
     typedef Particle entity_type;
-    typedef ParticleVector collection_type; 
+    typedef ParticleVector collection_type;
 
     /// Template-usable interface common to JetAlg.
-    const collection_type& entities() const { 
-      return particles(); 
+    const collection_type& entities() const {
+      return particles();
     }
 
 
   protected:
-    
+ 
     /// Apply the projection to the event.
     virtual void project(const Event& e);
-    
+ 
     /// Compare projections.
     virtual int compare(const Projection& p) const;
 
     /// Decide if a particle is to be accepted or not.
     bool accept(const Particle& p) const;
 
-    
-  protected:
  
+  protected:
+
     /// The ranges allowed for pseudorapidity.
     vector<pair<double,double> > _etaRanges;
-    
+ 
     /// The minimum allowed transverse momentum.
     double _ptmin;
-    
+ 
     /// The final-state particles.
     mutable ParticleVector _theParticles;
-    
+ 
   };
 
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/FinalStateHCM.hh
==============================================================================
--- trunk/include/Rivet/Projections/FinalStateHCM.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/FinalStateHCM.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,10 +15,10 @@
   class FinalStateHCM: public FinalState {
 
   public:
-    
+ 
     /// Constructor
     FinalStateHCM(const DISKinematics& kinematicsp)
-    { 
+    {
       setName("FinalStateHCM");
       addProjection(kinematicsp, "Kinematics");
     }
@@ -27,16 +27,16 @@
     virtual const Projection* clone() const {
       return new FinalStateHCM(*this);
     }
-    
+ 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
-    int compare(const Projection& p) const;    
+    int compare(const Projection& p) const;
   };
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/FoxWolframMoments.hh
==============================================================================
--- trunk/include/Rivet/Projections/FoxWolframMoments.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/FoxWolframMoments.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,15 +17,15 @@
 namespace Rivet {
 
 
-  /// Project out the total visible energy vector, allowing missing 
+  /// Project out the total visible energy vector, allowing missing
   /// \f$ E_T \f$ etc. to be calculated.
   class FoxWolframMoments : public Projection {
-    
+ 
   public:
-    
+ 
     /// Constructor.
     FoxWolframMoments(const FinalState& fsp)
-    { 
+    {
         setName("FoxWolframMoments");
         addProjection(fsp, "FS");
         //addProjection(TotalVisibleMomentum(fsp), "SumET");
@@ -48,29 +48,29 @@
     virtual const Projection* clone() const {
       return new FoxWolframMoments(*this);
     }
-    
+ 
   public:
 
     /// The projected Fox-Wolfram Moment of order l
-      const double getFoxWolframMoment(unsigned int l) const { 
+      const double getFoxWolframMoment(unsigned int l) const {
         if ( l < MAXMOMENT )
-            return _fwmoments[l]; 
+            return _fwmoments[l];
         else return -666.0;
       }
-      
+   
   protected:
-    
+ 
     /// Apply the projection to the event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
-        
+     
   private:
       vector<double> _fwmoments;
 
   };
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/HadronicFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/HadronicFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/HadronicFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,15 +16,15 @@
   class HadronicFinalState : public FinalState {
 
   public:
-    
+ 
     /// Constructor: the supplied FinalState projection is assumed to live through the run.
-    HadronicFinalState(FinalState& fsp) 
+    HadronicFinalState(FinalState& fsp)
       : FinalState(fsp)
-    { 
+    {
       setName("HadronicFinalState");
       addProjection(fsp, "FS");
     }
-    
+ 
     HadronicFinalState(double mineta = -MAXRAPIDITY,
                        double maxeta = MAXRAPIDITY,
                        double minpt = 0.0*GeV)
@@ -40,16 +40,16 @@
     }
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
-    
+ 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/Hemispheres.hh
==============================================================================
--- trunk/include/Rivet/Projections/Hemispheres.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/Hemispheres.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,24 +18,24 @@
 
      @todo Allow axes to be defined by sphericity: superclass Thrust and Sphericity as AxisDefinition?
 
-     The "high" hemisphere mass, 
+     The "high" hemisphere mass,
      \f$ M^2_\mathrm{high} / E^2_\mathrm{vis} \f$, is defined as
      \f[
-     \frac{M^2_\mathrm{high}}{E^2_\mathrm{vis}} = 
+     \frac{M^2_\mathrm{high}}{E^2_\mathrm{vis}} =
      \frac{1}{E^2_\mathrm{vis}} \max
      \left(
      \left| \sum_{\vec{p}_k \cdot \vec{n}_\mathrm{T} > 0} p_k \right|^2 ,
      \left| \sum_{\vec{p}_k \cdot \vec{n}_\mathrm{T} < 0} p_k \right|^2
      \right)
      \f]
-     and the corresponding "low" hemisphere mass, 
+     and the corresponding "low" hemisphere mass,
      \f$ M^2_\mathrm{low} / E^2_\mathrm{vis} \f$,
-     is the sum of momentum vectors in the opposite hemisphere, i.e. 
+     is the sum of momentum vectors in the opposite hemisphere, i.e.
      \f$ \max \rightarrow \min \f$ in the formula above.
 
      Finally, we define a hemisphere mass difference:
      \f[
-     \frac{M^2_\mathrm{diff} }{ E^2_\mathrm{vis}} = 
+     \frac{M^2_\mathrm{diff} }{ E^2_\mathrm{vis}} =
      \frac{ M^2_\mathrm{high} - M^2_\mathrm{low} }{ E^2_\mathrm{vis}} .
      \f]
 
@@ -44,10 +44,10 @@
      \f[
      B_\pm =
      \frac{
-       \sum{\pm \vec{p}_i \cdot \vec{n}_\mathrm{T} > 0} 
-       |\vec{p}_i \times \vec{n}_\mathrm{T} | 
+       \sum{\pm \vec{p}_i \cdot \vec{n}_\mathrm{T} > 0}
+       |\vec{p}_i \times \vec{n}_\mathrm{T} |
      }{
-       2 \sum_i | \vec{p}_i | 
+       2 \sum_i | \vec{p}_i |
      }
      \f]
      and then a set of the broadening maximum, minimum, sum and difference as follows:
@@ -96,20 +96,20 @@
     const double M2high() const { return _M2high; }
     const double M2low() const { return _M2low; }
     const double M2diff() const { return _M2high -_M2low; }
-    const double scaledM2high() const { 
+    const double scaledM2high() const {
       if (_M2high == 0.0) return 0.0;
-      if (_E2vis != 0.0) return _M2high/_E2vis; 
-      else return std::numeric_limits<double>::max(); 
+      if (_E2vis != 0.0) return _M2high/_E2vis;
+      else return std::numeric_limits<double>::max();
     }
     const double scaledM2low() const {
       if (_M2low == 0.0) return 0.0;
       if (_E2vis != 0.0) return _M2low/_E2vis;
-      else return std::numeric_limits<double>::max(); 
+      else return std::numeric_limits<double>::max();
     }
-    const double scaledM2diff() const { 
+    const double scaledM2diff() const {
       if (M2diff() == 0.0) return 0.0;
-      if (_E2vis != 0.0) return M2diff()/_E2vis; 
-      else return std::numeric_limits<double>::max(); 
+      if (_E2vis != 0.0) return M2diff()/_E2vis;
+      else return std::numeric_limits<double>::max();
     }
     ///@}
 
@@ -128,7 +128,7 @@
       return _highMassEqMaxBroad;
     }
 
-        
+     
   private:
 
     /// Visible energy-squared, \f$ E^2_\mathrm{vis} \f$.

Modified: trunk/include/Rivet/Projections/IdentifiedFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/IdentifiedFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/IdentifiedFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,11 +16,11 @@
   class IdentifiedFinalState : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
     /// Default constructor.
-    IdentifiedFinalState(double etamin=-MAXRAPIDITY, double etamax=MAXRAPIDITY, double ptMin=0.0*GeV) 
+    IdentifiedFinalState(double etamin=-MAXRAPIDITY, double etamax=MAXRAPIDITY, double ptMin=0.0*GeV)
       : FinalState(etamin, etamax, ptMin)
     {
       setName("IdentifiedFinalState");
@@ -39,7 +39,7 @@
       return new IdentifiedFinalState(*this);
     }
     //@}
-    
+ 
 
   public:
 
@@ -47,7 +47,7 @@
     const set<PdgId>& acceptedIds() const {
       return _pids;
     }
-  
+
     /// Add an accepted particle ID.
     IdentifiedFinalState& acceptId(PdgId pid) {
       _pids.insert(pid);
@@ -98,13 +98,13 @@
     void reset() {
       _pids.clear();
     }
-    
+ 
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
@@ -113,10 +113,10 @@
 
     /// The final-state particles.
     set<PdgId> _pids;
-    
+ 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/InitialQuarks.hh
==============================================================================
--- trunk/include/Rivet/Projections/InitialQuarks.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/InitialQuarks.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,15 +11,15 @@
 
   /// Project out all final-state particles in an event.
   class InitialQuarks : public Projection {
-    
+ 
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
     /// The default constructor. May specify the minimum and maximum
     /// pseudorapidity \f$ \eta \f$ and the min \f$ p_T \f$ (in GeV).
     InitialQuarks()
-    { 
+    {
       setName("InitialQuarks");
     }
 
@@ -29,7 +29,7 @@
       return new InitialQuarks(*this);
     }
     //@}
-        
+     
     /// Access the projected final-state particles.
     virtual const ParticleVector& particles() const { return _theParticles; }
 
@@ -37,20 +37,20 @@
     virtual const bool empty() const { return _theParticles.empty(); }
 
   protected:
-    
+ 
     /// Apply the projection to the event.
     virtual void project(const Event& e);
-    
+ 
     /// Compare projections.
     virtual int compare(const Projection& p) const;
-    
-  protected:
  
+  protected:
+
     /// The final-state particles.
     ParticleVector _theParticles;
-    
+ 
   };
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/InvMassFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/InvMassFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/InvMassFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,7 +11,7 @@
   class InvMassFinalState : public FinalState {
 
   public:
-    
+ 
     // Constructor for a single inv-mass pair
     InvMassFinalState(const FinalState& fsp,
                       const std::pair<long, long>& idpair, // pair of decay products
@@ -23,8 +23,8 @@
                       const std::vector<std::pair<long, long> >& idpairs,  // vector of pairs of decay products
                       double minmass, // min inv mass
                       double maxmass); // max inv mass
-    
-    
+ 
+ 
     /// Clone on the heap.
     virtual const Projection* clone() const {
     	return new InvMassFinalState(*this);
@@ -32,28 +32,28 @@
 		
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
 
   private:
-    
+ 
     /// ids of the decay products
     std::vector<std::pair<long, long> > _decayids;
-   
+
     /// min inv mass
     double _minmass;
 
     /// max inv mass
     double _maxmass;
-    
+ 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/IsolationEstimators.hh
==============================================================================
--- trunk/include/Rivet/Projections/IsolationEstimators.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/IsolationEstimators.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,13 +12,13 @@
 namespace Rivet {
 
 
-  template < typename T, typename C > 
+  template < typename T, typename C >
   class IsolationEstimator {
 
     public:
 
     virtual ~IsolationEstimator(){};
-    
+ 
       virtual double estimate(const T & t, const C & c) const = 0;
 
       virtual int compare(const IsolationEstimator < T, C > *other) const = 0;
@@ -41,7 +41,7 @@
 
   // An estimator for the sum of the pt of the particles in collection C
   // being within radius from t
-  template < class T, class C > 
+  template < class T, class C >
   class PtInConeEstimator : public IsolationEstimator < T, C > {
   public:
     PtInConeEstimator(double radius, double ptmin = 0.0)
@@ -68,15 +68,15 @@
       if (radcmp != 0)
          return radcmp;
        return 0;
-    } 
-    
+    }
+ 
     double radius() const {
       return _radius;
-    } 
-    
+    }
+ 
     double ptMin() const {
       return _ptmin;
-    } 
+    }
   private:
     double _radius;
     double _ptmin;
@@ -85,12 +85,12 @@
 
   // An estimator for the number of particles in collection C
   // being within radius from t
-  template < class T, class C > 
+  template < class T, class C >
   class MultiplicityInConeEstimator : public IsolationEstimator < T, C > {
   public:
     MultiplicityInConeEstimator(double radius, double ptmin = 0.0)
       : _radius(radius), _ptmin(ptmin) {  }
-    
+ 
     virtual double estimate(const T & t, const C & c) const {
       double npart = 0;
       for (typename C::const_iterator ic = c.begin(); ic != c.end(); ++ic) {
@@ -99,7 +99,7 @@
         if (deltaR(t.momentum(), ic->momentum()) < _radius) {
           npart++;
         }
-      } 
+      }
       return npart;
     }
 
@@ -112,16 +112,16 @@
       if (radcmp != 0)
          return radcmp;
        return 0;
-    } 
-    
+    }
+ 
     double radius() const {
       return _radius;
-    } 
-    
+    }
+ 
     double ptMin() const {
       return _ptmin;
-    } 
-    
+    }
+ 
   private:
     double _radius;
     double _ptmin;
@@ -140,7 +140,7 @@
             typedef IsolationEstimator<TYPE1, TYPE2> estimatorhelper;
   };
 
- 
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/IsolationProjection.hh
==============================================================================
--- trunk/include/Rivet/Projections/IsolationProjection.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/IsolationProjection.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,27 +11,27 @@
 namespace Rivet{
 
 
-  /// PROJ1 can be either FinalState projections or JetAlg projections 
+  /// PROJ1 can be either FinalState projections or JetAlg projections
   /// PROJ1::entity_type and PROJ2::entity_type can be either Particle of Jet
-  template <typename PROJ1, typename PROJ2, 
+  template <typename PROJ1, typename PROJ2,
             typename EST = typename isohelper<typename PROJ1::entity_type, typename PROJ2::collection_type>::estimatorhelper>
   class IsolationProjection : public Projection {
     public:
     /// Constructor
-    IsolationProjection(PROJ1& iso, 
-                        PROJ2& ctrl, 
+    IsolationProjection(PROJ1& iso,
+                        PROJ2& ctrl,
                         EST* estimator,
-                        double ptmin = 0*GeV) : 
-      _estimator(estimator), 
+                        double ptmin = 0*GeV) :
+      _estimator(estimator),
       _ptmin(ptmin)
     {
       setName("IsolationProjection");
-      addProjection(iso, "ToBeIsolated"); 
-      addProjection(ctrl, "Control"); 
+      addProjection(iso, "ToBeIsolated");
+      addProjection(ctrl, "Control");
     }	
 
     /// Get the isolation values for the isofinalstate
-    const vector<pair<const typename PROJ1::entity_type*, double> > 
+    const vector<pair<const typename PROJ1::entity_type*, double> >
     isolatedParticles(double maxiso = numeric_limits<double>::max()) const;
 
     virtual const Projection* clone() const {
@@ -45,10 +45,10 @@
 
     /// Compare projections.
     virtual int compare(const Projection& p) const;
-		    
+		
 
   private:
-    
+ 
     /// the estimator
     boost::shared_ptr<EST> _estimator;
 
@@ -68,10 +68,10 @@
   inline const vector<pair<const typename PROJ1::entity_type*, double> > IsolationProjection<PROJ1, PROJ2, EST>
   ::isolatedParticles(double maxiso) const {
     vector<pair<const typename PROJ1::entity_type*, double> > out;
-    for (typename vector<pair<const typename PROJ1::entity_type*, double> >::const_iterator i = _isovalues.begin(); i != _isovalues.end(); ++i){ 
+    for (typename vector<pair<const typename PROJ1::entity_type*, double> >::const_iterator i = _isovalues.begin(); i != _isovalues.end(); ++i){
       if (i->second < maxiso) out.push_back(*i);
     }
-    return out;  
+    return out;
   }
 
 
@@ -81,7 +81,7 @@
     _isovalues.clear();
     /// projetc the final states
     const PROJ1& isofs  = applyProjection<PROJ1>(e, "ToBeIsolated");
-    /// copy of particles is suboptimal, but FinalState returns 
+    /// copy of particles is suboptimal, but FinalState returns
     /// particles by referencem while JetAlg returns jets by value
     const typename PROJ1::collection_type isopart = isofs.entities();
     const PROJ2& ctrlfs = applyProjection<PROJ2>(e, "Control");
@@ -109,7 +109,7 @@
     // compare the estimators
     //if (cmp(*(_estimator.get()),*(other._estimator.get())) == EQUIVALENT) cout << "Estimatori uguali!" << endl;
     return cmp(*(_estimator.get()),*(other._estimator.get()));
-  } 
+  }
 
 }
 

Modified: trunk/include/Rivet/Projections/IsolationTools.hh
==============================================================================
--- trunk/include/Rivet/Projections/IsolationTools.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/IsolationTools.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,7 +6,7 @@
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/JetAlg.hh"
 
-  
+
 namespace Rivet{
   typedef IsolationProjection<JetAlg, JetAlg> AllJetsIso;
 

Modified: trunk/include/Rivet/Projections/JetAlg.hh
==============================================================================
--- trunk/include/Rivet/Projections/JetAlg.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/JetAlg.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -54,10 +54,10 @@
     return fabs(a.rapidity()) < fabs(b.rapidity());
   }
 
-  
+
   /// Abstract base class for projections which can return a set of {@link Jet}s.
   class JetAlg : public Projection {
-    
+ 
   public:
 
     /// Constructor
@@ -106,7 +106,7 @@
     virtual void reset() = 0;
 
     typedef Jet entity_type;
-    typedef Jets collection_type; 
+    typedef Jets collection_type;
 
     /// Template-usable interface common to FinalState.
     collection_type entities() const { return jets(); }

Modified: trunk/include/Rivet/Projections/JetShape.hh
==============================================================================
--- trunk/include/Rivet/Projections/JetShape.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/JetShape.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,16 +15,16 @@
   /**
      @brief Calculate the jet shape.
 
-     Calculate the differential and integral jet shapes in \f$P_{\perp}\f$ for a given 
+     Calculate the differential and integral jet shapes in \f$P_{\perp}\f$ for a given
      set of jet axes each event.
-     
+  
      The rapidity scheme (\f$ \eta \f$ or \f$ y \f$) has to be specified when
      invoking the constructor.
 
-     The differential jet shape around a given jet axis at distance interval 
+     The differential jet shape around a given jet axis at distance interval
      \f$ r \pm \delta{r}/2 \f$ is defined as
      \f[
-     \rho(r) = 
+     \rho(r) =
        \frac{1}{\delta r} \frac{1}{N_\mathrm{jets}}
        \sum_\mathrm{jets} \frac{P_\perp(r - \delta r/2, r+\delta r/2)}{p_\perp(0, R)}
      \f]
@@ -32,12 +32,12 @@
 
      The integral jet shape around a given jet axes until distance \f$ r \f$ is defined as
      \f[
-     \Psi(r) = 
+     \Psi(r) =
        \frac{1}{N_\mathrm{jets}}
-       \sum_\mathrm{jets} \frac{P_\perp(0, r)}{p_\perp(0, R)} 
+       \sum_\mathrm{jets} \frac{P_\perp(0, r)}{p_\perp(0, R)}
      \f]
      with \f$ 0 \le r \le R \f$ and \f$ P_\perp(r_1, r_2) = \sum_{\in [r_1, r_2)} p_\perp \f$.
-     
+  
      The constructor expects also the equidistant binning in radius \f$ r \f$ to produce the
      jet shape of all bins in a vector and this separately for each jet to allow
      post-selection.
@@ -57,8 +57,8 @@
     //@{
 
     /// Constructor.
-    JetShape(const VetoedFinalState& vfsp, const vector<FourMomentum>& jetaxes, 
-             double rmin=0.0, double rmax=0.7, double interval=0.1, 
+    JetShape(const VetoedFinalState& vfsp, const vector<FourMomentum>& jetaxes,
+             double rmin=0.0, double rmax=0.7, double interval=0.1,
              double r1minPsi=0.3, DeltaRScheme distscheme=RAPIDITY);
 
     /// Clone on the heap.
@@ -72,25 +72,25 @@
     /// Reset projection between events
     void clear();
 
-    
+ 
   public:
 
-    
+ 
     /// Number of equidistant radius bins.
     double numBins() const {
       return _nbins;
     }
-    
+ 
     /// \f$ r_\text{min} \f$ value.
     double rMin() const {
       return _rmin;
     }
-    
+ 
     /// \f$ r_\text{max} \f$ value.
     double rMax() const {
       return _rmax;
     }
-    
+ 
     /// Radius interval size.
     double interval() const {
       return _interval;
@@ -101,29 +101,29 @@
     double diffJetShape(size_t pTbin, size_t rbin) const {
       return _diffjetshapes[pTbin][rbin];
     }
-    
+ 
     /// Return value of integrated jet shape profile histo bin.
     /// @todo Remove this external indexing thing
     double intJetShape(size_t pTbin, size_t rbin) const {
       return _intjetshapes[pTbin][rbin];
     }
-    
+ 
     /// Return value of \f$ \Psi \f$ (integrated jet shape) at given radius for a \f$ p_T \f$ bin.
     /// @todo Remove this external indexing thing
     double psi(size_t pTbin) const {
       return _PsiSlot[pTbin];
     }
-    
+ 
 
   protected:
-    
+ 
     /// Apply the projection to the event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
- 
-       
+
+    
   private:
 
     /// The jet axes of the jet algorithm projection
@@ -165,7 +165,7 @@
     //@}
   };
 
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/KtJets.hh
==============================================================================
--- trunk/include/Rivet/Projections/KtJets.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/KtJets.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,36 +9,36 @@
 
 
 namespace Rivet {
-  
+
   /// Project out jets based on configurable kT algorithm.
   class KtJets : public Projection {
-    
+ 
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
 
     /// Default constructor. Must specify a FinalState projection which is
     //  assumed to live throughout the run.
     KtJets(const FinalState& fsp)
-      : _pktev(0), _type(4), _angle(2), _recom(1), 
+      : _pktev(0), _type(4), _angle(2), _recom(1),
         _rparameter(1.0)
-    { 
+    {
       setName("KtJets");
       addProjection(fsp, "FS");
     }
 
 
     /// Argument constructor. Allows the to be run with different parameters.
-    /// Must specify a FinalState projection which is assumed to live throughout the run. 
+    /// Must specify a FinalState projection which is assumed to live throughout the run.
     KtJets(const FinalState& fsp, int type, int angle, int recom, double rparameter)
       : _pktev(0), _type(type), _angle(angle), _recom(recom),
         _rparameter(rparameter)
-    { 
+    {
       setName("KtJets");
       addProjection(fsp, "FS");
     }
-    
+ 
 
     /// Clone on the heap.
     virtual const Projection* clone() const {
@@ -47,23 +47,23 @@
 
 
     /// Destructor.
-    virtual ~KtJets() { 
-      delete _pktev; 
+    virtual ~KtJets() {
+      delete _pktev;
     }
     //@}
 
-    
-  protected:   
+ 
+  protected:
 
     /// Perform the projection on the Event.
     void project(const Event& e);
 
     /// Compare projections.
-    int compare(const Projection& p) const;  
+    int compare(const Projection& p) const;
 
 
   public:
-    
+ 
     /// @name Access the projected NJets.
     //@ {
     int getNJets() const { return _pktev->getNJets(); }
@@ -84,11 +84,11 @@
     //@}
 
     /// Get the subjet splitting variables for the given jet.
-    vector<double> getYSubJet(const KtJet::KtLorentzVector& jet) const; 
+    vector<double> getYSubJet(const KtJet::KtLorentzVector& jet) const;
 
 
   private:
-    
+ 
     /// Internal KtEvent, rebuilt every time an event is projected, but not otherwise.
     KtJet::KtEvent* _pktev;
 
@@ -97,9 +97,9 @@
 
     /// Map of vectors of y scales. This is mutable so we can use caching/lazy evaluation.
     mutable map<int, vector<double> > _yscales;
-    
+ 
   };
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/LeadingParticlesFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/LeadingParticlesFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/LeadingParticlesFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,29 +18,29 @@
     /// Constructor: the supplied FinalState projection is assumed to live through the run.
     /// @todo Why specify the rap & pT cuts again?
     LeadingParticlesFinalState(const FinalState& fsp, double mineta=-MAXRAPIDITY, double maxeta=MAXRAPIDITY, double minpt=0.0*GeV)
-      :  FinalState(mineta, maxeta, minpt) 
+      :  FinalState(mineta, maxeta, minpt)
     {
       setName("LeadingParticlesFinalState");
       addProjection(fsp, "FS");
     }
 
-    /// Clone on the heap. 
+    /// Clone on the heap.
     virtual const Projection *clone() const {
       return new LeadingParticlesFinalState(*this);
     }
 
-    /// Add a particle ID to the list of leading particles selected 
+    /// Add a particle ID to the list of leading particles selected
     LeadingParticlesFinalState& addParticleId(long id) {
       _ids.insert(id);
       return *this;
-    } 
+    }
 
-    /// Add a particle ID to the list of leading particles selected 
+    /// Add a particle ID to the list of leading particles selected
     LeadingParticlesFinalState& addParticleIdPair(long id) {
       _ids.insert(id);
       _ids.insert(-id);
       return *this;
-    } 
+    }
 
     // /// Check if a particle of a particular ID was found in the current event
     // bool hasParticleId(const PdgId pid) const;
@@ -48,7 +48,7 @@
     // /// Get a particle of a particular ID (check it exists first)
     // bool get(const PdgId pid) const;
 
-    
+ 
   protected:
 
     /// Apply the projection on the supplied event.

Modified: trunk/include/Rivet/Projections/LossyFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/LossyFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/LossyFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,26 +17,26 @@
   class LossyFinalState : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
 
     /// Constructor from FinalState.
     LossyFinalState(const FinalState& fsp, double lossfraction)
       : _lossFraction(lossfraction)
-    { 
+    {
       setName("LossyFinalState");
       addProjection(fsp, "FS");
       assert(_lossFraction >= 0);
     }
-    
+ 
     /// Stand-alone constructor. Initialises the base FinalState projection.
     LossyFinalState(double lossfraction,
                     double mineta = -MAXRAPIDITY,
                     double maxeta = MAXRAPIDITY,
                     double minpt = 0.0)
       : _lossFraction(lossfraction)
-    { 
+    {
       setName("LossyFinalState");
       addProjection(FinalState(mineta, maxeta, minpt), "FS");
       assert(_lossFraction >= 0);
@@ -50,10 +50,10 @@
     //@}
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
@@ -71,10 +71,10 @@
 
     /// Fraction of particles to lose.
     const double _lossFraction;
-    
+ 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/Multiplicity.hh
==============================================================================
--- trunk/include/Rivet/Projections/Multiplicity.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/Multiplicity.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,7 +18,7 @@
     /// Constructor. The provided FinalState projection must live throughout the run.
     Multiplicity(const FinalState& fsp)
       : _totalMult(0), _hadMult(0)
-    { 
+    {
       setName("Multiplicity");
       addProjection(fsp, "FS");
     }

Modified: trunk/include/Rivet/Projections/PVertex.hh
==============================================================================
--- trunk/include/Rivet/Projections/PVertex.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/PVertex.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,16 +6,16 @@
 #include "Rivet/Event.hh"
 #include "Rivet/Particle.hh"
 
-namespace Rivet {  
+namespace Rivet {
+
 
-  
   /// @brief Get the position of the primary vertex of an event.
   ///
   /// HepMC doesn't reliably return the signal process vertex, so
-  /// we have to use the "decay vertex" of the beam particles. 
+  /// we have to use the "decay vertex" of the beam particles.
   /// This gives the right position, within experimental resolution,
   /// but ISR effects can mean that the actual vertex is not right.
-  /// Hence, we don't expose the HepMC GenVertex directly - if it were 
+  /// Hence, we don't expose the HepMC GenVertex directly - if it were
   /// available, people might try to e.g. look at the \f$ p_T \f$
   /// of the vertex children, which would be extremely unreliable.
   class PVertex : public Projection {
@@ -25,8 +25,8 @@
     //@{
     /// The default constructor.
     PVertex()
-      : _thePVertex(0) 
-    { 
+      : _thePVertex(0)
+    {
       setName("PVertex");
     }
 
@@ -43,9 +43,9 @@
       return Vector3(0,0,0);
     }
 
-    
+ 
   protected:
-    
+ 
     /// Do the projection.
     void project(const Event& e);
 
@@ -53,15 +53,15 @@
     int compare(const Projection & p) const {
       return 0;
     }
-    
-  
+ 
+
   private:
 
     /// The Primary Vertex in the current collision.
     GenVertex* _thePVertex;
-    
+ 
   };
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/ParisiTensor.hh
==============================================================================
--- trunk/include/Rivet/Projections/ParisiTensor.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/ParisiTensor.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,25 +12,25 @@
 
   /**
      @brief Calculate the Parisi event shape tensor (or linear momentum tensor).
-     
+  
      The Parisi event shape C and D variables are derived from the eigenvalues of
      the linear momentum tensor
-     \f[ 
-     \theta^{\alpha \beta} = 
+     \f[
+     \theta^{\alpha \beta} =
      \frac{\sum_i \frac{p_i^\alpha p_i^\beta}{|\mathbf{p}_i|}}
-          {\sum_i |\mathbf{p}_i|} 
+          {\sum_i |\mathbf{p}_i|}
      \f]
-     which is actually a linearized (and hence infra-red safe) version of the 
+     which is actually a linearized (and hence infra-red safe) version of the
      {@link Sphericity} tensor.
 
      Defining the three eigenvalues of \f$\theta\f$
-     \f$ \lambda_1 \ge \lambda_2 \ge \lambda_3 \f$, with \f$ \lambda_1 + \lambda_2 + \lambda_3 = 1 \f$, 
+     \f$ \lambda_1 \ge \lambda_2 \ge \lambda_3 \f$, with \f$ \lambda_1 + \lambda_2 + \lambda_3 = 1 \f$,
      the C and D parameters are defined as
-     \f[ 
+     \f[
      C = 3(\lambda_1\lambda_2 + \lambda_1\lambda_3 + \lambda_2\lambda_3)
      \f]
      and
-     \f[ 
+     \f[
      D = 27 \lambda_1\lambda_2\lambda_3
      \f]
 
@@ -77,9 +77,9 @@
     const double lambda2() const { return _lambda[1]; }
     const double lambda3() const { return _lambda[2]; }
     ///@}
-        
+     
   private:
-    
+ 
     /// The Parisi event shape variables.
     double _C, _D;
 

Modified: trunk/include/Rivet/Projections/SVertex.hh
==============================================================================
--- trunk/include/Rivet/Projections/SVertex.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/SVertex.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,7 +14,7 @@
 
   /**
      @brief Determine secondary vertices.
-     
+  
      Makes use of PVertex projection.
 
      @todo Replace function with a functor to improve equality comparisons.
@@ -23,14 +23,14 @@
      Complex cuts on tracks and vertices to validate them have to be provided
      by an external function
      bool f(SVertex&, ParticleVector&, const HepMC::GenVertex&, FourMomentum);
-     which can be embedded in the analysis code. An example can be found 
-     in the S6653332 analysis. A pointer to this function has to be given 
+     which can be embedded in the analysis code. An example can be found
+     in the S6653332 analysis. A pointer to this function has to be given
      to the constructor of the SVertex projection. Its arguments are as follows:
 
      in: reference to instance of SVertex projection, ParticleVector of
          vertex to be analyzed, primary (Gen)Vertex
-     out: FourMomentum = visible Momentum of vertex (selected tracks), 
-     return bool: cuts passed? 1 : 0 
+     out: FourMomentum = visible Momentum of vertex (selected tracks),
+     return bool: cuts passed? 1 : 0
 
      In this way the SVertex projection can be kept as universal/flexible
      as possible.
@@ -45,15 +45,15 @@
 
     /// @name Standard constructors and destructors.
     //@{
-    /// The default constructor. Must specify a PVertex 
+    /// The default constructor. Must specify a PVertex
     /// projection object which is assumed to live through the run.
-    SVertex(const ChargedFinalState& chfs, 
+    SVertex(const ChargedFinalState& chfs,
             const vector<FourMomentum>& jetaxes, double deltaR,
-            double detEta, double IPres, double DLS, double DLSres=0.0) 
+            double detEta, double IPres, double DLS, double DLSres=0.0)
       : _jetaxes(jetaxes), _deltaR(deltaR),
-        _detEta(detEta), _IPres(IPres), _DLS(DLS), 
+        _detEta(detEta), _IPres(IPres), _DLS(DLS),
         _DLSres(DLSres)
-    { 
+    {
       setName("SVertex");
       addProjection(PVertex(), "PV");
       addProjection(chfs, "FS");

Modified: trunk/include/Rivet/Projections/Sphericity.hh
==============================================================================
--- trunk/include/Rivet/Projections/Sphericity.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/Sphericity.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,37 +12,37 @@
 
   /**
      @brief Calculate the sphericity event shape.
-     
-     The sphericity tensor (or quadratic momentum tensor) is defined as 
-     \f[ 
-     S^{\alpha \beta} = \frac{\sum_i p_i^\alpha p_i^\beta}{\sum_i |\mathbf{p}_i|^2} 
+  
+     The sphericity tensor (or quadratic momentum tensor) is defined as
+     \f[
+     S^{\alpha \beta} = \frac{\sum_i p_i^\alpha p_i^\beta}{\sum_i |\mathbf{p}_i|^2}
      \f],
      where the Greek indices are spatial components and the Latin indices are used
      for sums over particles. From this, the sphericity, aplanarity and planarity can be
      calculated by combinations of eigenvalues.
-     
+  
      Defining the three eigenvalues
-     \f$ \lambda_1 \ge \lambda_2 \ge \lambda_3 \f$, with \f$ \lambda_1 + \lambda_2 + \lambda_3 = 1 \f$, 
+     \f$ \lambda_1 \ge \lambda_2 \ge \lambda_3 \f$, with \f$ \lambda_1 + \lambda_2 + \lambda_3 = 1 \f$,
      the sphericity is
-     \f[ 
-     S = \frac{3}{2} (\lambda_2 + \lambda_3) 
+     \f[
+     S = \frac{3}{2} (\lambda_2 + \lambda_3)
      \f]
-     
+  
      The aplanarity is \f$ A = \frac{3}{2}\lambda_3 \f$ and the planarity
-     is \f$ P = \frac{2}{3}(S-2A) = \lambda_2 - \lambda_3 \f$. The eigenvectors define a 
-     set of spatial axes comparable with the thrust axes, but more sensitive to 
+     is \f$ P = \frac{2}{3}(S-2A) = \lambda_2 - \lambda_3 \f$. The eigenvectors define a
+     set of spatial axes comparable with the thrust axes, but more sensitive to
      high momentum particles due to the quadratic sensitivity of the tensor to
      the particle momenta.
-     
+  
      Since the sphericity is quadratic in the particle momenta, it is not an
      infrared safe observable in perturbative QCD. This can be fixed by adding
      a regularizing power of \f$r\f$ to the definition:
-     \f[ 
-     S^{\alpha \beta} = 
+     \f[
+     S^{\alpha \beta} =
      \frac{\sum_i |\mathbf{p}_i|^{r-2} p_i^\alpha p_i^\beta}
-     {\sum_i |\mathbf{p}_i|^r} 
+     {\sum_i |\mathbf{p}_i|^r}
      \f]
-     
+  
      \f$r\f$ is available as a constructor argument on this class and will be
      taken into account by the Cmp<Projection> operation, so a single analysis
      can use several sphericity projections with different \f$r\f$ values without
@@ -119,7 +119,7 @@
     /// @name Direct methods
     /// Ways to do the calculation directly, without engaging the caching system
     //@{
-    
+ 
     /// Manually calculate the sphericity, without engaging the caching system
     void calc(const FinalState& fs);
 

Modified: trunk/include/Rivet/Projections/Thrust.hh
==============================================================================
--- trunk/include/Rivet/Projections/Thrust.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/Thrust.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,35 +11,35 @@
 
 
   /**
-    @brief Obtain the e+ e- thrust event shape, consisting of the thrust basis and the 
+    @brief Obtain the e+ e- thrust event shape, consisting of the thrust basis and the
     thrust scalar values in each direction (the thrust, thrust major and thrust
     minor).
 
     @author Andy Buckley
-   
+
     The scalar (maximum) thrust is defined as
     \f[
     T = \mathrm{max}_{\vec{n}} \frac{\sum_i \left|\vec{p}_i \cdot \vec{n} \right|}{\sum_i |\vec{p}_i|}
     \f],
-    with the direction of the unit vector \f$ \vec{n} \f$ which maximises \f$ T \f$ 
+    with the direction of the unit vector \f$ \vec{n} \f$ which maximises \f$ T \f$
     being identified as the thrust axis. The unit vector which maximises the thrust
     scalar in the plane perpendicular to \f$ \vec{n} \f$ is the "thrust major"
     direction, and the vector perpendicular to both the thrust and thrust major directions
-    is the thrust minor. Both the major and minor directions have associated thrust 
+    is the thrust minor. Both the major and minor directions have associated thrust
     scalars.
 
     Thrust calculations have particularly simple forms for less than 4 particles, and
     in those cases this projection is computationally minimal. For 4 or more particles,
-    a more general calculation must be carried out, based on the Brandt/Dahmen method 
+    a more general calculation must be carried out, based on the Brandt/Dahmen method
     from Z. Phys. C1 (1978). While a polynomial improvement on the exponential scaling
-    of the naive method, this algorithm scales asymptotically as 
-    \f$ \mathcal{O}\left( n^3 \right) \f$. Be aware that the thrust may easily be the 
+    of the naive method, this algorithm scales asymptotically as
+    \f$ \mathcal{O}\left( n^3 \right) \f$. Be aware that the thrust may easily be the
     most computationally demanding projection in Rivet for large events!
 
     The Rivet implementation of thrust is based heavily on Stefan Gieseke's Herwig++
     re-coding of the 'tasso' code from HERWIG.
 
-    NB. special case with >= 4 coplanar particles will still fail. 
+    NB. special case with >= 4 coplanar particles will still fail.
     NB. Thrust assumes all momenta are in the CoM system: no explicit boost is performed.
       This can be dealt with by appropriate choice of the supplied FinalState.
    */
@@ -63,14 +63,14 @@
 
     /// Perform the projection on the Event
     void project(const Event& e) {
-      const vector<Particle> ps 
+      const vector<Particle> ps
         = applyProjection<FinalState>(e, "FS").particles();
       calc(ps);
     }
 
     /// Compare projections
-    int compare(const Projection& p) const { 
-      return mkNamedPCmp(p, "FS"); 
+    int compare(const Projection& p) const {
+      return mkNamedPCmp(p, "FS");
     }
 
 
@@ -117,7 +117,7 @@
 
     /// Manually calculate the thrust, without engaging the caching system
     void calc(const vector<FourMomentum>& fsmomenta);
-      
+   
     /// Manually calculate the thrust, without engaging the caching system
     void calc(const vector<Vector3>& threeMomenta);
 
@@ -141,7 +141,7 @@
     void _calcThrust(const vector<Vector3>& fsmomenta);
 
   };
-  
+
 }
 
 #endif

Modified: trunk/include/Rivet/Projections/TotalVisibleMomentum.hh
==============================================================================
--- trunk/include/Rivet/Projections/TotalVisibleMomentum.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/TotalVisibleMomentum.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,15 +11,15 @@
 namespace Rivet {
 
 
-  /// Project out the total visible energy vector, allowing missing 
+  /// Project out the total visible energy vector, allowing missing
   /// \f$ E_T \f$ etc. to be calculated.
   class TotalVisibleMomentum : public Projection {
-    
+ 
   public:
-    
+ 
     /// Constructor. Make sure you supply an appropriately vetoed FS!
     TotalVisibleMomentum(const FinalState& fsp)
-    { 
+    {
       setName("TotalVisibleMomentum");
       addProjection(fsp, "FS");
     }
@@ -29,7 +29,7 @@
       return new TotalVisibleMomentum(*this);
     }
 
-    
+ 
   public:
     /// The projected four-momentum vector
     FourMomentum& momentum() { return _momentum; }
@@ -39,26 +39,26 @@
 
     /// The projected scalar transverse energy
     const double scalarET() const { return _set; }
-    
+ 
 
   protected:
-    
+ 
     /// Apply the projection to the event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
-        
+     
   private:
-    
+ 
     /// The total visible momentum
     FourMomentum _momentum;
-    
+ 
     /// Scalar transverse energy
     double _set;
-    
+ 
   };
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/TriggerCDFRun0Run1.hh
==============================================================================
--- trunk/include/Rivet/Projections/TriggerCDFRun0Run1.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/TriggerCDFRun0Run1.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,9 +13,9 @@
   /// Project out the incoming beams
   class TriggerCDFRun0Run1 : public Projection {
   public:
-    
+ 
     /// Default constructor.
-    TriggerCDFRun0Run1() { 
+    TriggerCDFRun0Run1() {
       setName("TriggerCDFRun0Run1");
 
       addProjection(ChargedFinalState(-5.9, 5.9), "CFS");

Modified: trunk/include/Rivet/Projections/TriggerUA5.hh
==============================================================================
--- trunk/include/Rivet/Projections/TriggerUA5.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/TriggerUA5.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,9 +13,9 @@
   /// Project out the incoming beams
   class TriggerUA5 : public Projection {
   public:
-    
+ 
     /// Default constructor.
-    TriggerUA5() { 
+    TriggerUA5() {
       setName("TriggerUA5");
 
       addProjection(Beam(), "Beam");
@@ -43,7 +43,7 @@
       return _decision_nsd_1;
     }
 
-    /// The trigger result for non-single diffractive (2 arm) trigger 
+    /// The trigger result for non-single diffractive (2 arm) trigger
     /// with special ">= 2" trigger for ppbar bg rejection
     const bool nsd2Decision() const {
       return _decision_nsd_2;

Modified: trunk/include/Rivet/Projections/UnstableFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/UnstableFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/UnstableFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,9 +11,9 @@
 
   /// Project out all final-state particles in an event.
   class UnstableFinalState : public Projection {
-    
+ 
   public:
-    
+ 
     /// @name Standard constructors and destructors.
     //@{
     /// The default constructor. May specify the minimum and maximum
@@ -22,7 +22,7 @@
                        double maxeta =  MAXRAPIDITY,
                        double minpt  =  0.0*GeV)
       : _etamin(mineta), _etamax(maxeta), _ptmin(minpt)
-    { 
+    {
       setName("UnstableFinalState");
       // addCut("eta", MORE_EQ, mineta);
       // addCut("eta", LESS_EQ, maxeta);
@@ -35,7 +35,7 @@
       return new UnstableFinalState(*this);
     }
     //@}
-        
+     
     /// Access the projected final-state particles.
     virtual const ParticleVector& particles() const { return _theParticles; }
 
@@ -43,29 +43,29 @@
     virtual const bool empty() const { return _theParticles.empty(); }
 
   protected:
-    
+ 
     /// Apply the projection to the event.
     virtual void project(const Event& e);
-    
+ 
     /// Compare projections.
     virtual int compare(const Projection& p) const;
-    
-  protected:
  
+  protected:
+
     /// The minimum allowed pseudorapidity.
     double _etamin;
-    
+ 
     /// The maximum allowed pseudorapidity.
     double _etamax;
-    
+ 
     /// The minimum allowed transverse momentum.
     double _ptmin;
-    
+ 
     /// The final-state particles.
     ParticleVector _theParticles;
-    
+ 
   };
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/VetoedFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/VetoedFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/VetoedFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,7 +17,7 @@
 
   public:
 
-    /// Typedef for a pair of back-to-back cuts.    
+    /// Typedef for a pair of back-to-back cuts.
     typedef pair<double, double> BinaryCut;
 
     /// Typedef for a vetoing entry.
@@ -26,7 +26,7 @@
     /// Typedef for a veto on a composite particle mass.
     typedef multimap<int, BinaryCut>  CompositeVeto;
 
-    
+ 
     /// @name Constructors
     //@{
     /// Default constructor.
@@ -67,7 +67,7 @@
       return new VetoedFinalState(*this);
     }
     //@}
-    
+ 
 
   public:
 
@@ -75,8 +75,8 @@
     const VetoDetails& vetoDetails() const {
       return _vetoCodes;
     }
-  
-    /// Add a particle ID and \f$ p_T \f$ range to veto. Particles with \f$ p_T \f$ 
+
+    /// Add a particle ID and \f$ p_T \f$ range to veto. Particles with \f$ p_T \f$
     /// IN the given range will be rejected.
     VetoedFinalState& addVetoDetail(const long id, const double ptmin, const double ptmax) {
       BinaryCut ptrange(ptmin, ptmax);
@@ -92,7 +92,7 @@
       return *this;
     }
 
-    /// Add a particle/antiparticle pair to veto. Given a single ID, both the particle and its corresponding 
+    /// Add a particle/antiparticle pair to veto. Given a single ID, both the particle and its corresponding
     /// antiparticle (for all \f$ p_T \f$ values) will be vetoed.
     VetoedFinalState& addVetoPairId(const long id) {
       addVetoId(id);
@@ -125,15 +125,15 @@
       _nCompositeDecays.insert(nProducts);
       return *this;
     }
-    
+ 
     /// Veto the decay products of particle with pdg id
-    /// @todo Need HepMC to sort themselves out and keep vector bosons from 
+    /// @todo Need HepMC to sort themselves out and keep vector bosons from
     /// the hard vtx in the event record before this will work reliably for all pdg ids
     VetoedFinalState& addDecayProductsVeto(const long id){
       _parentVetoes.insert(id);
       return *this;
     }
-    
+ 
     /// Set the list of particle IDs and \f$ p_T \f$ ranges to veto.
     VetoedFinalState& setVetoDetails(const VetoDetails& ids) {
       _vetoCodes = ids;
@@ -145,7 +145,7 @@
       _vetoCodes.clear();
       return *this;
     }
-    
+ 
 
     /// Veto particles from a supplied final state.
     VetoedFinalState& addVetoOnThisFinalState(FinalState& fs) {
@@ -159,10 +159,10 @@
 
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
@@ -171,11 +171,11 @@
 
     /// The final-state particles.
     VetoDetails _vetoCodes;
-    
+ 
     /// Composite particle masses to veto
     CompositeVeto _compositeVetoes;
     set<int> _nCompositeDecays;
-    
+ 
     typedef set<long> ParentVetos;
 
     /// Set of decaying particle IDs to veto
@@ -186,7 +186,7 @@
 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/VisibleFinalState.hh
==============================================================================
--- trunk/include/Rivet/Projections/VisibleFinalState.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/VisibleFinalState.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,7 +17,7 @@
   class VisibleFinalState : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
     /// Default constructor.
@@ -54,19 +54,19 @@
       return new VisibleFinalState(*this);
     }
     //@}
-    
+ 
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/WFinder.hh
==============================================================================
--- trunk/include/Rivet/Projections/WFinder.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/WFinder.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,7 +17,7 @@
   class WFinder : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
 
@@ -58,19 +58,19 @@
 
 
     /// Access to the remaining particles, after the Z and clustered photons
-    /// have been removed from the full final state 
+    /// have been removed from the full final state
     /// (e.g. for running a jet finder on it)
     const FinalState& remainingFinalState() const;
 
-    /// Access to the Z constituent leptons final state 
+    /// Access to the Z constituent leptons final state
     /// (e.g. for more fine-grained cuts on the leptons)
     const FinalState& constituentsFinalState() const;
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
@@ -91,7 +91,7 @@
 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/Projections/ZFinder.hh
==============================================================================
--- trunk/include/Rivet/Projections/ZFinder.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Projections/ZFinder.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,7 +17,7 @@
   class ZFinder : public FinalState {
 
   public:
-    
+ 
     /// @name Constructors
     //@{
 
@@ -58,19 +58,19 @@
 
 
     /// Access to the remaining particles, after the Z and clustered photons
-    /// have been removed from the full final state 
+    /// have been removed from the full final state
     /// (e.g. for running a jet finder on it)
     const FinalState& remainingFinalState() const;
 
-    /// Access to the Z constituent leptons final state 
+    /// Access to the Z constituent leptons final state
     /// (e.g. for more fine-grained cuts on the leptons)
     const FinalState& constituentsFinalState() const;
 
   protected:
-    
+ 
     /// Apply the projection on the supplied event.
     void project(const Event& e);
-    
+ 
     /// Compare projections.
     int compare(const Projection& p) const;
 
@@ -90,7 +90,7 @@
 
   };
 
-  
+
 }
 
 

Modified: trunk/include/Rivet/RivetAIDA.hh
==============================================================================
--- trunk/include/Rivet/RivetAIDA.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/RivetAIDA.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,5 +1,5 @@
-#ifndef RIVET_RIVETAIDA_HH 
-#define RIVET_RIVETAIDA_HH 
+#ifndef RIVET_RIVETAIDA_HH
+#define RIVET_RIVETAIDA_HH
 
 /// @author Andy Buckley
 /// @date   2009-01-30
@@ -27,8 +27,8 @@
   /// Function to get a map of all the bin edge vectors in a paper with the
   /// given @a papername.
   const map<string, BinEdges> getBinEdges(string papername);
-  
-  const map<string, BinEdges> 
+
+  const map<string, BinEdges>
   getBinEdges(const map<string, vector<DPSXPoint> >& xpoints);
 
   const map<string, vector<DPSXPoint> > getDPSXValsErrs(string papername);
@@ -52,12 +52,12 @@
   /// Return the integral over the histogram bins assuming it has been
   // normalize()d.
   inline double integral(AIDA::IHistogram1D* histo) {
-    double intg = 0.; 
+    double intg = 0.;
     for ( int i = 0; i < histo->axis().bins(); ++i )
       intg += histo->binHeight(i) * histo->axis().binWidth(i);
     return intg;
   }
-  
+
 
 
   using AIDA::IHistogram1D;

Modified: trunk/include/Rivet/Run.hh
==============================================================================
--- trunk/include/Rivet/Run.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Run.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -65,12 +65,12 @@
 
     //@}
 
-    
+ 
   private:
 
     /// AnalysisHandler object
     AnalysisHandler& _ah;
-    
+ 
     /// @name Run variables obtained from events or command line
     //@{
 
@@ -93,7 +93,7 @@
     /// @name HepMC I/O members
     //@{
 
-    /// HepMC's own reader from streams   
+    /// HepMC's own reader from streams
     HepMC::IO_GenEvent* m_io;
 
     /// STL istream, used by IO_GenEvent if input is not a file

Modified: trunk/include/Rivet/Tools/BinnedHistogram.hh
==============================================================================
--- trunk/include/Rivet/Tools/BinnedHistogram.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/BinnedHistogram.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,52 +12,52 @@
 
 namespace Rivet{
 
-  
+
   /**
    * BinnedHistogram contains a series of histograms of the same quantity
-   * each in a different region of a second quantity.  For example, a 
-   * BinnedHistogram may contain histograms of the cross section differential 
+   * each in a different region of a second quantity.  For example, a
+   * BinnedHistogram may contain histograms of the cross section differential
    * in PT in different eta regions.
    *
    **/
-  
+
   template<typename T> class BinnedHistogram{
-    
+ 
   public:
-    
+ 
     /**
      * Create a new empty BinnedHistogram
      */
-    
+ 
     BinnedHistogram(){return;};
-    
+ 
     /**
-     *  Add a histogram in the region between binMin and binMax to this set of 
+     *  Add a histogram in the region between binMin and binMax to this set of
      *  BinnedHistograms.
      */
-    
-    const BinnedHistogram<T> &addHistogram(const T &binMin, 
-                                        const T &binMax, 
+ 
+    const BinnedHistogram<T> &addHistogram(const T &binMin,
+                                        const T &binMax,
                                         AIDA::IHistogram1D *histo);
-    
+ 
     /**
-     *  Fill the histogram that lies in the same region as bin with the 
+     *  Fill the histogram that lies in the same region as bin with the
      *  value val of weight weight.
      */
-    
+ 
     AIDA::IHistogram1D* const fill(const T &bin,
                                 const T &val,
                                 const double &weight);
-    
+ 
     const vector<AIDA::IHistogram1D*> &getHistograms() const { return _histos; }
     vector<AIDA::IHistogram1D*> &getHistograms() { return _histos; }
-    
+ 
   private:
-    
+ 
     map<T, AIDA::IHistogram1D*> _histosByUpperBound;
     map<T, AIDA::IHistogram1D*> _histosByLowerBound;
     vector<AIDA::IHistogram1D*> _histos;
-    
+ 
   };
 }
 

Modified: trunk/include/Rivet/Tools/Configuration.hh
==============================================================================
--- trunk/include/Rivet/Tools/Configuration.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/Configuration.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,5 +1,5 @@
 // -*- C++ -*-
-#ifndef RIVET_CONFIGURATION_HH 
+#ifndef RIVET_CONFIGURATION_HH
 #define RIVET_CONFIGURATION_HH
 
 #include "Rivet/Rivet.hh"
@@ -26,10 +26,10 @@
   public:
     /// Standard constructor
     Configuration() :
-      numEvents(0), generatorName(""), beam1(Rivet::PROTON), beam2(Rivet::PROTON), 
-      mom1(7000.0), mom2(7000.0), histoName("Rivet"), histoFormat(Rivet::AIDAML), 
-      hepmlInFile(""), hepmlOutFile(""), hepmcInFile(""), hepmcOutFile(""), 
-      useLogColors(true), runRivet(false), readHepMC(false), writeHepMC(false), 
+      numEvents(0), generatorName(""), beam1(Rivet::PROTON), beam2(Rivet::PROTON),
+      mom1(7000.0), mom2(7000.0), histoName("Rivet"), histoFormat(Rivet::AIDAML),
+      hepmlInFile(""), hepmlOutFile(""), hepmcInFile(""), hepmcOutFile(""),
+      useLogColors(true), runRivet(false), readHepMC(false), writeHepMC(false),
       params(), analyses(), rngSeed(314159)
     { }
 

Modified: trunk/include/Rivet/Tools/Logging.hh
==============================================================================
--- trunk/include/Rivet/Tools/Logging.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/Logging.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,4 +1,4 @@
-#ifndef RIVET_LOGGING_HH 
+#ifndef RIVET_LOGGING_HH
 #define RIVET_LOGGING_HH
 
 #include "Rivet/Rivet.hh"
@@ -88,7 +88,7 @@
     static std::string getColorCode(int level);
 
   public:
-    /// Get a logger with the given name. The level will be taken from the 
+    /// Get a logger with the given name. The level will be taken from the
     /// "requestedLevels" static map or will be INFO by default.
     static Log& getLog(const std::string& name);
 
@@ -131,7 +131,7 @@
     void trace(const std::string& message) { log(TRACE, message); }
 
     void debug(const std::string& message) { log(DEBUG, message); }
-    
+ 
     void info(const std::string& message) { log(INFO, message); }
 
     void warn(const std::string& message) { log(WARN, message); }
@@ -142,10 +142,10 @@
   private:
     /// This logger's name
     std::string _name;
-    
+ 
     /// Threshold level for this logger.
     int _level;
-    
+ 
   protected:
     /// Write a message at a particular level.
     void log(int level, const std::string& message);
@@ -163,10 +163,10 @@
     friend std::ostream& operator<<(Log& log, int level);
 
   };
-  
+
   /// Streaming output to a logger must have a Log::Level/int as its first argument.
   std::ostream& operator<<(Log& log, int level);
-  
+
 }
 
 

Modified: trunk/include/Rivet/Tools/ParticleIdUtils.hh
==============================================================================
--- trunk/include/Rivet/Tools/ParticleIdUtils.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/ParticleIdUtils.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -7,10 +7,10 @@
 //
 //  In the standard numbering scheme, the PID digits (base 10) are:
 //            +/- n nr nl nq1 nq2 nq3 nj
-//  It is expected that any 7 digit number used as a PID will adhere to 
+//  It is expected that any 7 digit number used as a PID will adhere to
 //  the Monte Carlo numbering scheme documented by the PDG.
-//  Note that many "new" particles not explicitly defined already 
-//  can be expressed within this numbering scheme. 
+//  Note that many "new" particles not explicitly defined already
+//  can be expressed within this numbering scheme.
 //
 //  These are the same methods that can be found in HepPDT::ParticleID
 // ----------------------------------------------------------------------
@@ -28,15 +28,15 @@
 unsigned short digit( location loc, const int & pid );
 
 /// if this is a nucleus (ion), get A
-/// Ion numbers are +/- 10LZZZAAAI. 
+/// Ion numbers are +/- 10LZZZAAAI.
 int A(const int & pid );
 
 /// if this is a nucleus (ion), get Z
-/// Ion numbers are +/- 10LZZZAAAI. 
+/// Ion numbers are +/- 10LZZZAAAI.
 int Z(const int & pid );
 
 /// if this is a nucleus (ion), get nLambda
-/// Ion numbers are +/- 10LZZZAAAI. 
+/// Ion numbers are +/- 10LZZZAAAI.
 int lambda( const int & pid );
 
 /// absolute value of particle ID
@@ -47,7 +47,7 @@
 /// if this is a fundamental particle, does it have a valid antiparticle?
 //bool hasFundamentalAnti( const int & pid );
 
-/// returns everything beyond the 7th digit 
+/// returns everything beyond the 7th digit
 /// (e.g. outside the standard numbering scheme)
 int extraBits( const int & pid );
 

Modified: trunk/include/Rivet/Tools/TypeTraits.hh
==============================================================================
--- trunk/include/Rivet/Tools/TypeTraits.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/TypeTraits.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -4,13 +4,13 @@
 
 namespace Rivet {
 
-  /// Mechanisms to allow references and pointers to templated types 
+  /// Mechanisms to allow references and pointers to templated types
   /// to be distinguished from one another (since C++ doesn't allow
   /// partial template specialisation for functions.
-  /// Traits methods use specialisation of class/struct templates, and 
-  /// some trickery with typedefs and static const integral types (or 
+  /// Traits methods use specialisation of class/struct templates, and
+  /// some trickery with typedefs and static const integral types (or
   /// enums) to implement partial function specialisation as a work-around.
-  
+
   struct RefType { };
   struct PtrType { };
 

Modified: trunk/include/Rivet/Tools/Utils.hh
==============================================================================
--- trunk/include/Rivet/Tools/Utils.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/Utils.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,10 +15,10 @@
   inline int nocase_cmp(const string& s1, const string& s2) {
     string::const_iterator it1 = s1.begin();
     string::const_iterator it2 = s2.begin();
-    while ( (it1 != s1.end()) && (it2 != s2.end()) ) { 
+    while ( (it1 != s1.end()) && (it2 != s2.end()) ) {
       if(::toupper(*it1) != ::toupper(*it2)) { // < Letters differ?
         // Return -1 to indicate smaller than, 1 otherwise
-        return (::toupper(*it1) < ::toupper(*it2)) ? -1 : 1; 
+        return (::toupper(*it1) < ::toupper(*it2)) ? -1 : 1;
       }
       // Proceed to the next character in each string
       ++it1;
@@ -33,14 +33,14 @@
 
   inline string toLower(const string& s) {
     string out = s;
-    transform(out.begin(), out.end(), out.begin(), (int(*)(int)) tolower); 
+    transform(out.begin(), out.end(), out.begin(), (int(*)(int)) tolower);
     return out;
   }
 
 
   inline string toUpper(const string& s) {
     string out = s;
-    std::transform(out.begin(), out.end(), out.begin(), (int(*)(int)) toupper); 
+    std::transform(out.begin(), out.end(), out.begin(), (int(*)(int)) toupper);
     return out;
   }
 
@@ -56,7 +56,7 @@
     return s.substr(s.length() - end.length()) == end;
   }
 
-  /// Split a string with single-character delimiters, ignoring zero-length 
+  /// Split a string with single-character delimiters, ignoring zero-length
   /// substrings. Designed for getting elements of filesystem paths, naturally.
   inline vector<string> split(string path, const string delim = ":") {
     vector<string> dirs;
@@ -123,7 +123,7 @@
 
   template <typename T>
   inline string join(const vector<T>& v, const string& sep = " ") {
-    stringstream out; 
+    stringstream out;
     for (size_t i = 0; i < v.size(); ++i) {
       if (i != 0) out << sep;
       out << v[i];

Modified: trunk/include/Rivet/Tools/osdir.hh
==============================================================================
--- trunk/include/Rivet/Tools/osdir.hh	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/Rivet/Tools/osdir.hh	Thu Nov 19 15:02:51 2009	(r2080)
@@ -165,7 +165,7 @@
  * functions and data) to form executables.
  *
  *                             NO WARRANTY
- *                              
+ *
  * 1. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
  * WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
  * EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
@@ -175,7 +175,7 @@
  * PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
  * LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
  * THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
- * 
+ *
  * 2. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
  * WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
  * AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
@@ -186,7 +186,7 @@
  * FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
  * SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  * DAMAGES.
- * 
+ *
  * END OF TERMS AND CONDITIONS
  *
  */

Modified: trunk/include/TinyXML/tinyxml.h
==============================================================================
--- trunk/include/TinyXML/tinyxml.h	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/include/TinyXML/tinyxml.h	Thu Nov 19 15:02:51 2009	(r2080)
@@ -86,7 +86,7 @@
 const int TIXML_MINOR_VERSION = 5;
 const int TIXML_PATCH_VERSION = 2;
 
-/*	Internal structure for tracking location of items 
+/*	Internal structure for tracking location of items
 	in the XML file.
 */
 struct TiXmlCursor
@@ -108,7 +108,7 @@
 	If you return 'true' from a Visit method, recursive parsing will continue. If you return
 	false, <b>no children of this node or its sibilings</b> will be Visited.
 
-	All flavors of Visit methods have a default implementation that returns 'true' (continue 
+	All flavors of Visit methods have a default implementation that returns 'true' (continue
 	visiting). You need to only override methods that are interesting to you.
 
 	Generally Accept() is called on the TiXmlDocument, although all nodes suppert Visiting.
@@ -143,8 +143,8 @@
 };
 
 // Only used by Attribute::Query functions
-enum 
-{ 
+enum
+{
 	TIXML_SUCCESS,
 	TIXML_NO_ATTRIBUTE,
 	TIXML_WRONG_TYPE
@@ -197,7 +197,7 @@
 		or the string class (TiXmlString in non-STL mode, std::string
 		in STL mode.) Either or both cfile and str can be null.
 		
-		This is a formatted print, and will insert 
+		This is a formatted print, and will insert
 		tabs and newlines.
 		
 		(For an unformatted stream, use the << operator.)
@@ -244,8 +244,8 @@
 	// in the UTF-8 sequence.
 	static const int utf8ByteTable[256];
 
-	virtual const char* Parse(	const char* p, 
-								TiXmlParsingData* data, 
+	virtual const char* Parse(	const char* p,
+								TiXmlParsingData* data,
 								TiXmlEncoding encoding /*= TIXML_ENCODING_UNKNOWN */ ) = 0;
 
 	enum
@@ -275,8 +275,8 @@
 
 	static const char* SkipWhiteSpace( const char*, TiXmlEncoding encoding );
 	inline static bool IsWhiteSpace( char c )		
-	{ 
-		return ( isspace( (unsigned char) c ) || c == '\n' || c == '\r' ); 
+	{
+		return ( isspace( (unsigned char) c ) || c == '\n' || c == '\r' );
 	}
 	inline static bool IsWhiteSpace( int c )
 	{
@@ -428,7 +428,7 @@
 		    The operator<< and operator>> are not completely symmetric. Writing
 		    a node to a stream is very well defined. You'll get a nice stream
 		    of output, without any extra whitespace or newlines.
-		    
+		
 		    But reading is not as well defined. (As it always is.) If you create
 		    a TiXmlElement (for example) and read that from an input stream,
 		    the text needs to define an element or junk will result. This is
@@ -680,11 +680,11 @@
 	virtual TiXmlDeclaration*       ToDeclaration() { return 0; } ///< Cast to a more defined type. Will return null if not of the requested type.
 
 	/** Create an exact duplicate of this node and return it. The memory must be deleted
-		by the caller. 
+		by the caller.
 	*/
 	virtual TiXmlNode* Clone() const = 0;
 
-	/** Accept a hierchical visit the nodes in the TinyXML DOM. Every node in the 
+	/** Accept a hierchical visit the nodes in the TinyXML DOM. Every node in the
 		XML tree will be conditionally visited and the host will be called back
 		via the TiXmlVisitor interface.
 
@@ -695,7 +695,7 @@
 		The interface has been based on ideas from:
 
 		- http://www.saxproject.org/
-		- http://c2.com/cgi/wiki?HierarchicalVisitorPattern 
+		- http://c2.com/cgi/wiki?HierarchicalVisitorPattern
 
 		Which are both good references for "visiting".
 
@@ -788,7 +788,7 @@
 
 	/** QueryIntValue examines the value string. It is an alternative to the
 		IntValue() method with richer error checking.
-		If the value is an integer, it is stored in 'value' and 
+		If the value is an integer, it is stored in 'value' and
 		the call returns TIXML_SUCCESS. If it is not
 		an integer, it returns TIXML_WRONG_TYPE.
 
@@ -813,13 +813,13 @@
 	/// Get the next sibling attribute in the DOM. Returns null at end.
 	const TiXmlAttribute* Next() const;
 	TiXmlAttribute* Next() {
-		return const_cast< TiXmlAttribute* >( (const_cast< const TiXmlAttribute* >(this))->Next() ); 
+		return const_cast< TiXmlAttribute* >( (const_cast< const TiXmlAttribute* >(this))->Next() );
 	}
 
 	/// Get the previous sibling attribute in the DOM. Returns null at beginning.
 	const TiXmlAttribute* Previous() const;
 	TiXmlAttribute* Previous() {
-		return const_cast< TiXmlAttribute* >( (const_cast< const TiXmlAttribute* >(this))->Previous() ); 
+		return const_cast< TiXmlAttribute* >( (const_cast< const TiXmlAttribute* >(this))->Previous() );
 	}
 
 	bool operator==( const TiXmlAttribute& rhs ) const { return rhs.name == name; }
@@ -941,7 +941,7 @@
 
 	/** QueryIntAttribute examines the attribute - it is an alternative to the
 		Attribute() method with richer error checking.
-		If the attribute is an integer, it is stored in 'value' and 
+		If the attribute is an integer, it is stored in 'value' and
 		the call returns TIXML_SUCCESS. If it is not
 		an integer, it returns TIXML_WRONG_TYPE. If the attribute
 		does not exist, then TIXML_NO_ATTRIBUTE is returned.
@@ -1027,23 +1027,23 @@
 		const char* str = fooElement->GetText();
 		@endverbatim
 
-		'str' will be a pointer to "This is text". 
+		'str' will be a pointer to "This is text".
 		
 		Note that this function can be misleading. If the element foo was created from
 		this XML:
 		@verbatim
-		<foo><b>This is text</b></foo> 
+		<foo><b>This is text</b></foo>
 		@endverbatim
 
 		then the value of str would be null. The first child node isn't a text node, it is
 		another element. From this XML:
 		@verbatim
-		<foo>This is <b>text</b></foo> 
+		<foo>This is <b>text</b></foo>
 		@endverbatim
 		GetText() will return "This is ".
 
-		WARNING: GetText() accesses a child node - don't become confused with the 
-				 similarly named TiXmlHandle::Text() and TiXmlNode::ToText() which are 
+		WARNING: GetText() accesses a child node - don't become confused with the
+				 similarly named TiXmlHandle::Text() and TiXmlNode::ToText() which are
 				 safe type casts on the referenced node.
 	*/
 	const char* GetText() const;
@@ -1061,7 +1061,7 @@
 	virtual const TiXmlElement*     ToElement()     const { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 	virtual TiXmlElement*           ToElement()	          { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 
-	/** Walk the XML tree visiting this node and all of its children. 
+	/** Walk the XML tree visiting this node and all of its children.
 	*/
 	virtual bool Accept( TiXmlVisitor* visitor ) const;
 
@@ -1114,7 +1114,7 @@
 	virtual const TiXmlComment*  ToComment() const { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 	virtual TiXmlComment*  ToComment() { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 
-	/** Walk the XML tree visiting this node and all of its children. 
+	/** Walk the XML tree visiting this node and all of its children.
 	*/
 	virtual bool Accept( TiXmlVisitor* visitor ) const;
 
@@ -1131,16 +1131,16 @@
 };
 
 
-/** XML text. A text node can have 2 ways to output the next. "normal" output 
+/** XML text. A text node can have 2 ways to output the next. "normal" output
 	and CDATA. It will default to the mode it was parsed from the XML file and
-	you generally want to leave it alone, but you can change the output mode with 
+	you generally want to leave it alone, but you can change the output mode with
 	SetCDATA() and query it with CDATA().
 */
 class TiXmlText : public TiXmlNode
 {
 	friend class TiXmlElement;
 public:
-	/** Constructor for text element. By default, it is treated as 
+	/** Constructor for text element. By default, it is treated as
 		normal, encoded text. If you want it be output as a CDATA text
 		element, set the parameter _cdata to 'true'
 	*/
@@ -1174,7 +1174,7 @@
 	virtual const TiXmlText* ToText() const { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 	virtual TiXmlText*       ToText()       { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 
-	/** Walk the XML tree visiting this node and all of its children. 
+	/** Walk the XML tree visiting this node and all of its children.
 	*/
 	virtual bool Accept( TiXmlVisitor* content ) const;
 
@@ -1246,7 +1246,7 @@
 	virtual const TiXmlDeclaration* ToDeclaration() const { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 	virtual TiXmlDeclaration*       ToDeclaration()       { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 
-	/** Walk the XML tree visiting this node and all of its children. 
+	/** Walk the XML tree visiting this node and all of its children.
 	*/
 	virtual bool Accept( TiXmlVisitor* visitor ) const;
 
@@ -1289,7 +1289,7 @@
 	virtual const TiXmlUnknown*     ToUnknown()     const { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 	virtual TiXmlUnknown*           ToUnknown()	    { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 
-	/** Walk the XML tree visiting this node and all of its children. 
+	/** Walk the XML tree visiting this node and all of its children.
 	*/
 	virtual bool Accept( TiXmlVisitor* content ) const;
 
@@ -1384,7 +1384,7 @@
 	*/
 	int ErrorId()	const				{ return errorId; }
 
-	/** Returns the location (if known) of the error. The first column is column 1, 
+	/** Returns the location (if known) of the error. The first column is column 1,
 		and the first row is row 1. A value of 0 means the row and column wasn't applicable
 		(memory errors, for example, have no row/column) or the parser lost the error. (An
 		error in the error reporting, in that case.)
@@ -1425,11 +1425,11 @@
 	/** If you have handled the error, it can be reset with this call. The error
 		state is automatically cleared if you Parse a new XML block.
 	*/
-	void ClearError()						{	error = false; 
-												errorId = 0; 
-												errorDesc = ""; 
-												errorLocation.row = errorLocation.col = 0; 
-												//errorLocation.last = 0; 
+	void ClearError()						{	error = false;
+												errorId = 0;
+												errorDesc = "";
+												errorLocation.row = errorLocation.col = 0;
+												//errorLocation.last = 0;
 											}
 
 	/** Write the document to standard out using formatted printing ("pretty print"). */
@@ -1439,7 +1439,7 @@
 		will allocate a character array (new char[]) and return it as a pointer. The
 		calling code pust call delete[] on the return char* to avoid a memory leak.
 	*/
-	//char* PrintToMemory() const; 
+	//char* PrintToMemory() const;
 
 	/// Print this Document to a FILE stream.
 	virtual void Print( FILE* cfile, int depth = 0 ) const;
@@ -1449,7 +1449,7 @@
 	virtual const TiXmlDocument*    ToDocument()    const { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 	virtual TiXmlDocument*          ToDocument()          { return this; } ///< Cast to a more defined type. Will return null not of the requested type.
 
-	/** Walk the XML tree visiting this node and all of its children. 
+	/** Walk the XML tree visiting this node and all of its children.
 	*/
 	virtual bool Accept( TiXmlVisitor* content ) const;
 
@@ -1485,7 +1485,7 @@
 	<Document>
 	@endverbatim
 
-	Assuming you want the value of "attributeB" in the 2nd "Child" element, it's very 
+	Assuming you want the value of "attributeB" in the 2nd "Child" element, it's very
 	easy to write a *lot* of code that looks like:
 
 	@verbatim
@@ -1505,7 +1505,7 @@
 	@endverbatim
 
 	And that doesn't even cover "else" cases. TiXmlHandle addresses the verbosity
-	of such code. A TiXmlHandle checks for null	pointers so it is perfectly safe 
+	of such code. A TiXmlHandle checks for null	pointers so it is perfectly safe
 	and correct to use:
 
 	@verbatim
@@ -1526,7 +1526,7 @@
 	What they should not be used for is iteration:
 
 	@verbatim
-	int i=0; 
+	int i=0;
 	while ( true )
 	{
 		TiXmlElement* child = docHandle.FirstChild( "Document" ).FirstChild( "Element" ).Child( "Child", i ).ToElement();
@@ -1537,8 +1537,8 @@
 	}
 	@endverbatim
 
-	It seems reasonable, but it is in fact two embedded while loops. The Child method is 
-	a linear walk to find the element, so this code would iterate much more than it needs 
+	It seems reasonable, but it is in fact two embedded while loops. The Child method is
+	a linear walk to find the element, so this code would iterate much more than it needs
 	to. Instead, prefer:
 
 	@verbatim
@@ -1568,20 +1568,20 @@
 	/// Return a handle to the first child element with the given name.
 	TiXmlHandle FirstChildElement( const char * value ) const;
 
-	/** Return a handle to the "index" child with the given name. 
+	/** Return a handle to the "index" child with the given name.
 		The first child is 0, the second 1, etc.
 	*/
 	TiXmlHandle Child( const char* value, int index ) const;
-	/** Return a handle to the "index" child. 
+	/** Return a handle to the "index" child.
 		The first child is 0, the second 1, etc.
 	*/
 	TiXmlHandle Child( int index ) const;
-	/** Return a handle to the "index" child element with the given name. 
+	/** Return a handle to the "index" child element with the given name.
 		The first child element is 0, the second 1, etc. Note that only TiXmlElements
 		are indexed: other types are not counted.
 	*/
 	TiXmlHandle ChildElement( const char* value, int index ) const;
-	/** Return a handle to the "index" child element. 
+	/** Return a handle to the "index" child element.
 		The first child element is 0, the second 1, etc. Note that only TiXmlElements
 		are indexed: other types are not counted.
 	*/
@@ -1595,7 +1595,7 @@
 
 	/** Return the handle as a TiXmlNode. This may return null.
 	*/
-	TiXmlNode* ToNode() const			{ return node; } 
+	TiXmlNode* ToNode() const			{ return node; }
 	/** Return the handle as a TiXmlElement. This may return null.
 	*/
 	TiXmlElement* ToElement() const		{ return ( ( node && node->ToElement() ) ? node->ToElement() : 0 ); }
@@ -1606,11 +1606,11 @@
 	*/
 	TiXmlUnknown* ToUnknown() const		{ return ( ( node && node->ToUnknown() ) ? node->ToUnknown() : 0 ); }
 
-	/** @deprecated use ToNode. 
+	/** @deprecated use ToNode.
 		Return the handle as a TiXmlNode. This may return null.
 	*/
-	TiXmlNode* Node() const			{ return ToNode(); } 
-	/** @deprecated use ToElement. 
+	TiXmlNode* Node() const			{ return ToNode(); }
+	/** @deprecated use ToElement.
 		Return the handle as a TiXmlElement. This may return null.
 	*/
 	TiXmlElement* Element() const	{ return ToElement(); }
@@ -1670,7 +1670,7 @@
 	void SetIndent( const char* _indent )			{ indent = _indent ? _indent : "" ; }
 	/// Query the indention string.
 	const char* Indent()							{ return indent.c_str(); }
-	/** Set the line breaking string. By default set to newline (\n). 
+	/** Set the line breaking string. By default set to newline (\n).
 		Some operating systems prefer other characters, or can be
 		set to the null/empty string for no indenation.
 	*/
@@ -1678,7 +1678,7 @@
 	/// Query the current line breaking string.
 	const char* LineBreak()							{ return lineBreak.c_str(); }
 
-	/** Switch over to "stream printing" which is the most dense formatting without 
+	/** Switch over to "stream printing" which is the most dense formatting without
 		linebreaks. Common when the XML is needed for network transmission.
 	*/
 	void SetStreamPrinting()						{ indent = "";

Modified: trunk/src/Analyses/ALEPH_1991_S2435284.cc
==============================================================================
--- trunk/src/Analyses/ALEPH_1991_S2435284.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/ALEPH_1991_S2435284.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,18 +14,18 @@
   public:
 
     /// Constructor.
-    ALEPH_1991_S2435284() 
+    ALEPH_1991_S2435284()
       : Analysis("ALEPH_1991_S2435284")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
     }
 
-  
+
     /// @name Analysis methods
     //@{
-    
+
     /// Book projections and histogram
-    void init() { 
+    void init() {
       const ChargedFinalState cfs;
       addProjection(cfs, "FS");
       addProjection(Multiplicity(cfs), "Mult");
@@ -47,7 +47,7 @@
       scale(_histChTot, 2.0/sumOfWeights()); // same as in ALEPH 1996
     }
 
-    //@}  
+    //@}
 
 
   private:
@@ -57,10 +57,10 @@
     AIDA::IHistogram1D* _histChTot;
     //@}
 
-  };    
+  };
 
-    
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<ALEPH_1991_S2435284> plugin_ALEPH_1991_S2435284;
-  
+
 }

Modified: trunk/src/Analyses/ALEPH_1996_S3196992.cc
==============================================================================
--- trunk/src/Analyses/ALEPH_1996_S3196992.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/ALEPH_1996_S3196992.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,7 +15,7 @@
   public:
 
     /// Constructor
-    ALEPH_1996_S3196992() : Analysis("ALEPH_1996_S3196992") 
+    ALEPH_1996_S3196992() : Analysis("ALEPH_1996_S3196992")
     {
       setBeams(ELECTRON, POSITRON);
     }
@@ -41,7 +41,7 @@
       _h_z_2jet_033 = bookHistogram1D(4, 1, 1);
       _h_z_3jet_001 = bookHistogram1D(5, 1, 1);
       _h_z_3jet_006 = bookHistogram1D(6, 1, 1);
-      _h_z_3jet_01  = bookHistogram1D(7, 1, 1); 
+      _h_z_3jet_01  = bookHistogram1D(7, 1, 1);
       _h_z_4jet_001 = bookHistogram1D(8, 1, 1);
     }
 
@@ -53,7 +53,7 @@
       if (applyProjection<FinalState>(event, "CFS").particles().size()<2) {
         vetoEvent;
       }
-      
+
       const ParticleVector allphotons = applyProjection<IdentifiedFinalState>(event, "Photons").particles();
       ParticleVector photons;
       foreach (const Particle& photon, allphotons) {
@@ -64,7 +64,7 @@
       if (photons.size()<1) {
         vetoEvent;
       }
-      
+
       const Thrust& thrust = applyProjection<Thrust>(event, "Thrust");
       if (fabs(cos(thrust.thrustAxis().theta()))>0.95) {
         vetoEvent;
@@ -73,7 +73,7 @@
       const FastJets& durjet = applyProjection<FastJets>(event, "DurhamJets");
 
       foreach (const Particle& photon, photons) {
-        
+
         PseudoJets jets_001 = durjet.clusterSeq()->exclusive_jets_ycut(0.01);
         foreach (const fastjet::PseudoJet& jet, jets_001) {
           if (particleInJet(photon, jet, durjet.clusterSeq())) {
@@ -84,7 +84,7 @@
             break;
           }
         }
-        
+
         PseudoJets jets_006 = durjet.clusterSeq()->exclusive_jets_ycut(0.06);
         foreach (const fastjet::PseudoJet& jet, jets_006) {
           if (particleInJet(photon, jet, durjet.clusterSeq())) {
@@ -94,7 +94,7 @@
             break;
           }
         }
-        
+
         PseudoJets jets_01 = durjet.clusterSeq()->exclusive_jets_ycut(0.1);
         foreach (const fastjet::PseudoJet& jet, jets_01) {
           if (particleInJet(photon, jet, durjet.clusterSeq())) {
@@ -104,7 +104,7 @@
             break;
           }
         }
-        
+
         PseudoJets jets_033 = durjet.clusterSeq()->exclusive_jets_ycut(0.33);
         foreach (const fastjet::PseudoJet& jet, jets_033) {
           if (particleInJet(photon, jet, durjet.clusterSeq())) {
@@ -113,11 +113,11 @@
             break;
           }
         }
-        
+
       }
     }
-    
-    
+
+
     bool particleInJet(const Particle& p, const fastjet::PseudoJet& jet,
                        const fastjet::ClusterSequence* cseq ) {
       foreach (const fastjet::PseudoJet& jetpart, cseq->constituents(jet)) {

Modified: trunk/src/Analyses/ALEPH_1996_S3486095.cc
==============================================================================
--- trunk/src/Analyses/ALEPH_1996_S3486095.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/ALEPH_1996_S3486095.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -22,19 +22,19 @@
   public:
 
     /// Constructor
-    ALEPH_1996_S3486095() 
+    ALEPH_1996_S3486095()
       : Analysis("ALEPH_1996_S3486095")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
       _numChParticles               = 0;
       _weightedTotalPartNum         = 0;
-      _weightedTotalNumPiPlus       = 0;       
-      _weightedTotalNumKPlus        = 0;      
-      _weightedTotalNumP            = 0;     
-      _weightedTotalNumPhoton       = 0;    
-      _weightedTotalNumPi0          = 0;   
-      _weightedTotalNumEta          = 0;  
-      _weightedTotalNumEtaPrime     = 0; 
+      _weightedTotalNumPiPlus       = 0;
+      _weightedTotalNumKPlus        = 0;
+      _weightedTotalNumP            = 0;
+      _weightedTotalNumPhoton       = 0;
+      _weightedTotalNumPi0          = 0;
+      _weightedTotalNumEta          = 0;
+      _weightedTotalNumEtaPrime     = 0;
       _weightedTotalNumK0           = 0;
       _weightedTotalNumLambda0      = 0;
       _weightedTotalNumXiMinus      = 0;
@@ -72,48 +72,48 @@
       _histTMinor       = bookHistogram1D(4, 1, 1);
 
       _histY3           = bookHistogram1D(5, 1, 1);
-      _histHeavyJetMass = bookHistogram1D(6, 1, 1); 
-      _histCParam       = bookHistogram1D(7, 1, 1); 
-      _histOblateness   = bookHistogram1D(8, 1, 1); 
+      _histHeavyJetMass = bookHistogram1D(6, 1, 1);
+      _histCParam       = bookHistogram1D(7, 1, 1);
+      _histOblateness   = bookHistogram1D(8, 1, 1);
 
-      _histScaledMom    = bookHistogram1D(9, 1, 1); 
-      _histRapidityT    = bookHistogram1D(10, 1, 1); 
+      _histScaledMom    = bookHistogram1D(9, 1, 1);
+      _histRapidityT    = bookHistogram1D(10, 1, 1);
 
-      _histPtSIn        = bookHistogram1D(11, 1, 1); 
-      _histPtSOut       = bookHistogram1D(12, 1, 1); 
+      _histPtSIn        = bookHistogram1D(11, 1, 1);
+      _histPtSOut       = bookHistogram1D(12, 1, 1);
 
-      _histLogScaledMom = bookHistogram1D(17, 1, 1); 
+      _histLogScaledMom = bookHistogram1D(17, 1, 1);
 
-      _histChMult       = bookHistogram1D(18, 1, 1); 
-      _histMeanChMult   = bookHistogram1D(19, 1, 1); 
+      _histChMult       = bookHistogram1D(18, 1, 1);
+      _histMeanChMult   = bookHistogram1D(19, 1, 1);
 
-      _histMeanChMultRapt05= bookHistogram1D(20, 1, 1); 
-      _histMeanChMultRapt10= bookHistogram1D(21, 1, 1); 
-      _histMeanChMultRapt15= bookHistogram1D(22, 1, 1); 
-      _histMeanChMultRapt20= bookHistogram1D(23, 1, 1); 
+      _histMeanChMultRapt05= bookHistogram1D(20, 1, 1);
+      _histMeanChMultRapt10= bookHistogram1D(21, 1, 1);
+      _histMeanChMultRapt15= bookHistogram1D(22, 1, 1);
+      _histMeanChMultRapt20= bookHistogram1D(23, 1, 1);
 
 
       // Particle spectra
-      _histMultiPiPlus        = bookHistogram1D(25, 1, 1); 
-      _histMultiKPlus         = bookHistogram1D(26, 1, 1); 
-      _histMultiP             = bookHistogram1D(27, 1, 1); 
-      _histMultiPhoton        = bookHistogram1D(28, 1, 1); 
-      _histMultiPi0           = bookHistogram1D(29, 1, 1); 
-      _histMultiEta           = bookHistogram1D(30, 1, 1); 
-      _histMultiEtaPrime      = bookHistogram1D(31, 1, 1); 
-      _histMultiK0            = bookHistogram1D(32, 1, 1); 
-      _histMultiLambda0       = bookHistogram1D(33, 1, 1); 
-      _histMultiXiMinus       = bookHistogram1D(34, 1, 1); 
-      _histMultiSigma1385Plus = bookHistogram1D(35, 1, 1); 
-      _histMultiXi1530_0      = bookHistogram1D(36, 1, 1); 
-      _histMultiRho           = bookHistogram1D(37, 1, 1); 
-      _histMultiOmega782      = bookHistogram1D(38, 1, 1); 
-      _histMultiKStar892_0    = bookHistogram1D(39, 1, 1); 
-      _histMultiPhi           = bookHistogram1D(40, 1, 1); 
+      _histMultiPiPlus        = bookHistogram1D(25, 1, 1);
+      _histMultiKPlus         = bookHistogram1D(26, 1, 1);
+      _histMultiP             = bookHistogram1D(27, 1, 1);
+      _histMultiPhoton        = bookHistogram1D(28, 1, 1);
+      _histMultiPi0           = bookHistogram1D(29, 1, 1);
+      _histMultiEta           = bookHistogram1D(30, 1, 1);
+      _histMultiEtaPrime      = bookHistogram1D(31, 1, 1);
+      _histMultiK0            = bookHistogram1D(32, 1, 1);
+      _histMultiLambda0       = bookHistogram1D(33, 1, 1);
+      _histMultiXiMinus       = bookHistogram1D(34, 1, 1);
+      _histMultiSigma1385Plus = bookHistogram1D(35, 1, 1);
+      _histMultiXi1530_0      = bookHistogram1D(36, 1, 1);
+      _histMultiRho           = bookHistogram1D(37, 1, 1);
+      _histMultiOmega782      = bookHistogram1D(38, 1, 1);
+      _histMultiKStar892_0    = bookHistogram1D(39, 1, 1);
+      _histMultiPhi           = bookHistogram1D(40, 1, 1);
 
-      _histMultiKStar892Plus  = bookHistogram1D(43, 1, 1); 
+      _histMultiKStar892Plus  = bookHistogram1D(43, 1, 1);
 
-      // Mean multiplicities 
+      // Mean multiplicities
       _histMeanMultiPi0           = bookHistogram1D(44, 1,  2);
       _histMeanMultiEta           = bookHistogram1D(44, 1,  3);
       _histMeanMultiEtaPrime      = bookHistogram1D(44, 1,  4);
@@ -150,15 +150,15 @@
 
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
 
       // Thrusts
       getLog() << Log::DEBUG << "Calculating thrust" << endl;
       const Thrust& thrust = applyProjection<Thrust>(e, "Thrust");
-      _hist1MinusT->fill(1 - thrust.thrust(), weight); 
-      _histTMinor->fill(thrust.thrustMinor(), weight); 
+      _hist1MinusT->fill(1 - thrust.thrust(), weight);
+      _histTMinor->fill(thrust.thrustMinor(), weight);
       _histOblateness->fill(thrust.oblateness(), weight);
 
       // Jets
@@ -172,8 +172,8 @@
       // Sphericities
       getLog() << Log::DEBUG << "Calculating sphericity" << endl;
       const Sphericity& sphericity = applyProjection<Sphericity>(e, "Sphericity");
-      _histSphericity->fill(sphericity.sphericity(), weight); 
-      _histAplanarity->fill(sphericity.aplanarity(), weight); 
+      _histSphericity->fill(sphericity.sphericity(), weight);
+      _histAplanarity->fill(sphericity.aplanarity(), weight);
 
       // C param
       getLog() << Log::DEBUG << "Calculating Parisi params" << endl;
@@ -204,8 +204,8 @@
         const double mom = mom3.mod();
         const double scaledMom = mom/meanBeamMom;
         const double logInvScaledMom = -std::log(scaledMom);
-        _histLogScaledMom->fill(logInvScaledMom, weight); 
-        _histScaledMom->fill(scaledMom, weight); 
+        _histLogScaledMom->fill(logInvScaledMom, weight);
+        _histScaledMom->fill(scaledMom, weight);
 
         // Get momenta components w.r.t. thrust and sphericity.
         const double momT = dot(thrust.thrustAxis(), mom3);
@@ -228,7 +228,7 @@
         }
         if (std::fabs(rapidityT) <= 2.0)  {
             rapt20 += 1.0;
-        } 
+        }
 
       }
 
@@ -251,7 +251,7 @@
         const double scaledMom = mom/meanBeamMom;
         const double scaledEnergy = energy/meanBeamMom;  // meanBeamMom is approximately beam energy
         switch (id) {
-           case 22: 
+           case 22:
               _histMultiPhoton->fill(-1.*std::log(scaledMom), weight);
               _weightedTotalNumPhoton += weight;
               break;
@@ -356,19 +356,19 @@
 
 
     /// Finalize
-    void finalize() { 
-      // Normalize inclusive single particle distributions to the average number 
+    void finalize() {
+      // Normalize inclusive single particle distributions to the average number
       // of charged particles per event.
       const double avgNumParts = _weightedTotalPartNum / sumOfWeights();
 
       normalize(_histPtSIn, avgNumParts);
-      normalize(_histPtSOut, avgNumParts); 
+      normalize(_histPtSOut, avgNumParts);
 
-      normalize(_histRapidityT, avgNumParts); 
-      normalize(_histY3); 
+      normalize(_histRapidityT, avgNumParts);
+      normalize(_histY3);
 
       normalize(_histLogScaledMom, avgNumParts);
-      normalize(_histScaledMom, avgNumParts); 
+      normalize(_histScaledMom, avgNumParts);
 
       // particle spectra
       scale(_histMultiPiPlus        ,1./sumOfWeights());
@@ -410,18 +410,18 @@
       //normalize(_histMultiKStar892Plus  ,_weightedTotalNumKStar892Plus/sumOfWeights());
 
       // event shape
-      normalize(_hist1MinusT); 
-      normalize(_histTMinor); 
-      normalize(_histOblateness); 
-
-      normalize(_histSphericity); 
-      normalize(_histAplanarity); 
-      normalize(_histHeavyJetMass);  
-      normalize(_histCParam); 
+      normalize(_hist1MinusT);
+      normalize(_histTMinor);
+      normalize(_histOblateness);
+
+      normalize(_histSphericity);
+      normalize(_histAplanarity);
+      normalize(_histHeavyJetMass);
+      normalize(_histCParam);
 
 
-      // mean multiplicities 
-      scale(_histChMult              , 2.0/sumOfWeights()); // taking into account the binwidth of 2 
+      // mean multiplicities
+      scale(_histChMult              , 2.0/sumOfWeights()); // taking into account the binwidth of 2
       scale(_histMeanChMult          , 1.0/sumOfWeights());
       scale(_histMeanChMultRapt05    , 1.0/sumOfWeights());
       scale(_histMeanChMultRapt10    , 1.0/sumOfWeights());
@@ -451,16 +451,16 @@
 
   private:
     /// Store the weighted sums of numbers of charged / charged+neutral
-    /// particles - used to calculate average number of particles for the 
+    /// particles - used to calculate average number of particles for the
     /// inclusive single particle distributions' normalisations.
     double _weightedTotalPartNum;
-    double _weightedTotalNumPiPlus;       
-    double _weightedTotalNumKPlus;      
-    double _weightedTotalNumP;     
-    double _weightedTotalNumPhoton;    
-    double _weightedTotalNumPi0;   
-    double _weightedTotalNumEta;  
-    double _weightedTotalNumEtaPrime; 
+    double _weightedTotalNumPiPlus;
+    double _weightedTotalNumKPlus;
+    double _weightedTotalNumP;
+    double _weightedTotalNumPhoton;
+    double _weightedTotalNumPi0;
+    double _weightedTotalNumEta;
+    double _weightedTotalNumEtaPrime;
     double _weightedTotalNumK0;
     double _weightedTotalNumLambda0;
     double _weightedTotalNumXiMinus;
@@ -478,30 +478,30 @@
     AIDA::IHistogram1D *_histSphericity;
     AIDA::IHistogram1D *_histAplanarity;
 
-    AIDA::IHistogram1D *_hist1MinusT; 
-    AIDA::IHistogram1D *_histTMinor; 
-    
+    AIDA::IHistogram1D *_hist1MinusT;
+    AIDA::IHistogram1D *_histTMinor;
+ 
     AIDA::IHistogram1D *_histY3;
     AIDA::IHistogram1D *_histHeavyJetMass;
     AIDA::IHistogram1D *_histCParam;
-    AIDA::IHistogram1D *_histOblateness; 
-    
-    AIDA::IHistogram1D *_histScaledMom; 
+    AIDA::IHistogram1D *_histOblateness;
+ 
+    AIDA::IHistogram1D *_histScaledMom;
     AIDA::IHistogram1D *_histRapidityT;
 
     AIDA::IHistogram1D *_histPtSIn;
     AIDA::IHistogram1D *_histPtSOut;
-    
+ 
     AIDA::IHistogram1D *_histJetRate2Durham;
     AIDA::IHistogram1D *_histJetRate3Durham;
     AIDA::IHistogram1D *_histJetRate4Durham;
     AIDA::IHistogram1D *_histJetRate5Durham;
-   
+
     AIDA::IHistogram1D *_histLogScaledMom;
-    
-    
+ 
+ 
     AIDA::IHistogram1D *_histChMult;
-    
+ 
 
     AIDA::IHistogram1D *_histMultiPiPlus;
     AIDA::IHistogram1D *_histMultiKPlus;
@@ -520,29 +520,29 @@
     AIDA::IHistogram1D *_histMultiKStar892_0;
     AIDA::IHistogram1D *_histMultiPhi;
     AIDA::IHistogram1D *_histMultiKStar892Plus;
-   
+
     // mean multiplicities
     AIDA::IHistogram1D *_histMeanChMult;
     AIDA::IHistogram1D *_histMeanChMultRapt05;
     AIDA::IHistogram1D *_histMeanChMultRapt10;
     AIDA::IHistogram1D *_histMeanChMultRapt15;
     AIDA::IHistogram1D *_histMeanChMultRapt20;
-    
-    AIDA::IHistogram1D *_histMeanMultiPi0;          
-    AIDA::IHistogram1D *_histMeanMultiEta;          
-    AIDA::IHistogram1D *_histMeanMultiEtaPrime;     
-    AIDA::IHistogram1D *_histMeanMultiK0;           
-    AIDA::IHistogram1D *_histMeanMultiRho;          
-    AIDA::IHistogram1D *_histMeanMultiOmega782;        
-    AIDA::IHistogram1D *_histMeanMultiPhi;         
-    AIDA::IHistogram1D *_histMeanMultiKStar892Plus; 
-    AIDA::IHistogram1D *_histMeanMultiKStar892_0;   
-    AIDA::IHistogram1D *_histMeanMultiLambda0;      
-    AIDA::IHistogram1D *_histMeanMultiSigma0;       
-    AIDA::IHistogram1D *_histMeanMultiXiMinus;      
+ 
+    AIDA::IHistogram1D *_histMeanMultiPi0;
+    AIDA::IHistogram1D *_histMeanMultiEta;
+    AIDA::IHistogram1D *_histMeanMultiEtaPrime;
+    AIDA::IHistogram1D *_histMeanMultiK0;
+    AIDA::IHistogram1D *_histMeanMultiRho;
+    AIDA::IHistogram1D *_histMeanMultiOmega782;
+    AIDA::IHistogram1D *_histMeanMultiPhi;
+    AIDA::IHistogram1D *_histMeanMultiKStar892Plus;
+    AIDA::IHistogram1D *_histMeanMultiKStar892_0;
+    AIDA::IHistogram1D *_histMeanMultiLambda0;
+    AIDA::IHistogram1D *_histMeanMultiSigma0;
+    AIDA::IHistogram1D *_histMeanMultiXiMinus;
     AIDA::IHistogram1D *_histMeanMultiSigma1385Plus;
-    AIDA::IHistogram1D *_histMeanMultiXi1530_0;     
-    AIDA::IHistogram1D *_histMeanMultiOmegaOmegaBar;        
+    AIDA::IHistogram1D *_histMeanMultiXi1530_0;
+    AIDA::IHistogram1D *_histMeanMultiOmegaOmegaBar;
     //@}
 
   };

Modified: trunk/src/Analyses/CDF_1988_S1865951.cc
==============================================================================
--- trunk/src/Analyses/CDF_1988_S1865951.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1988_S1865951.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,8 +14,8 @@
   public:
 
     /// Constructor
-    CDF_1988_S1865951() 
-      : Analysis("CDF_1988_S1865951") 
+    CDF_1988_S1865951()
+      : Analysis("CDF_1988_S1865951")
     {
       setBeams(PROTON, ANTIPROTON);
     }
@@ -23,7 +23,7 @@
 
     /// @name Analysis methods
     //@{
-    
+ 
     /// Book histograms
     void init() {
       addProjection(TriggerCDFRun0Run1(), "Trigger");
@@ -35,8 +35,8 @@
       _hist_pt1800 = bookHistogram1D(1, 1, 1);
       _hist_pt630 = bookHistogram1D(2, 1, 1);
     }
-    
-    
+ 
+ 
     /// Do the analysis
     void analyze(const Event& event) {
       // Trigger
@@ -46,7 +46,7 @@
 
       const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
       const FinalState& trackfs = applyProjection<ChargedFinalState>(event, "CFS");
-      
+   
       foreach (Particle p, trackfs.particles()) {
         const double pt = p.momentum().pT();
         // Effective weight for d3sig/dp3 = weight / ( Delta eta * 2pi * pt ), with Delta(eta) = 2
@@ -56,22 +56,22 @@
         } else if (fuzzyEquals(sqrtS, 1800/GeV)) {
           _hist_pt1800->fill(pt, eff_weight);
         }
-        
+     
       }
     }
-    
-    
+ 
+ 
     /// Scale histos
     void finalize() {
       /// @todo Total cross section hard-coded, needs a way to pass variable from generator
       scale(_hist_pt630, 32.6/sumOfWeights());
       scale(_hist_pt1800, 38.5/sumOfWeights());
     }
-   
+
     //@}
 
   private:
-    
+ 
     /// @name Histos
     //@{
     AIDA::IHistogram1D* _hist_pt630;
@@ -79,8 +79,8 @@
     //@}
 
   };
- 
-  
+
+
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_1988_S1865951> plugin_CDF_1988_S1865951;

Modified: trunk/src/Analyses/CDF_1990_S2089246.cc
==============================================================================
--- trunk/src/Analyses/CDF_1990_S2089246.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1990_S2089246.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -46,7 +46,7 @@
       if (!trigger) vetoEvent;
 
       // Get final state and energy
-      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS(); 
+      const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
       const FinalState& fs = applyProjection<FinalState>(event, "CFS");
 
       // Loop over final state charged particles to fill eta histos
@@ -60,16 +60,16 @@
         }
       }
     }
-    
-    
-    
+ 
+ 
+ 
     /// Finalize
     void finalize() {
       // Divide through by num events to get d<N>/d(eta) in bins
       scale(_hist_eta630, 1/sumOfWeights());
       scale(_hist_eta1800, 1/sumOfWeights());
     }
-   
+
     //@}
 
 
@@ -82,8 +82,8 @@
     //@}
 
   };
+
  
-    
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_1990_S2089246> plugin_CDF_1990_S2089246;

Modified: trunk/src/Analyses/CDF_1994_S2952106.cc
==============================================================================
--- trunk/src/Analyses/CDF_1994_S2952106.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1994_S2952106.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,13 +18,13 @@
 
     /// Constructor
     CDF_1994_S2952106()
-      : Analysis("CDF_1994_S2952106"), 
+      : Analysis("CDF_1994_S2952106"),
         _pvzmax(600*mm), _leadJetPt(100*GeV), _3rdJetPt(10*GeV),
         _etamax(0.7), _phimin(PI/18.0), _metsetmax(6.0*GeV)
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
-            
+         
       _events3jPassed = 0.0;
     }
 
@@ -38,7 +38,7 @@
       addProjection(FastJets(fs, FastJets::CDFJETCLU, 0.7), "ConeJets");
       addProjection(TotalVisibleMomentum(fs), "CalMET");
       addProjection(PVertex(), "PV");
-      
+   
       // Veto (anti)neutrinos, and muons with pT above 1.0 GeV
       VetoedFinalState vfs(fs);
       vfs.vetoNeutrinos();
@@ -46,99 +46,99 @@
       addProjection(vfs, "VFS");
 
       /// @todo Use histogram auto-booking
-      
+   
       //const string hname = "HvsDphi";
       //const string htitle = "H vs Delta phi";
       //_histHvsDphi = bookHistogram2D(hname, htitle, 40, -4., 4., 32, 0., 3.2);
-      
+   
       //const string hname2 = "RvsAlpha";
       //const string htitle2 = "R vs alpha";
       //_histRvsAlpha = bookHistogram2D(hname2, htitle2, 50, 0., 5., 32, -1.6, 1.6);
-      
+   
       _histJet1Et  = bookHistogram1D("Jet1Et", 40, 0., 500.);
       _histJet2Et  = bookHistogram1D("Jet2Et", 40, 0., 500.);
       _histR23     = bookHistogram1D("R23", 50, 0., 5.);
       _histJet3eta = bookHistogram1D("Jet3eta", 42, -4., 4.);
-      
+   
       /// @todo Need better title
       _histAlpha = bookHistogram1D("alpha", 42, -PI/2., PI/2.);
-      
+   
       //const string hname8 = "alphaMCvsDat";
       //const string htitle8 = "alpha MC vs. Data ";
       //_histAlphaMCvsDat = bookHistogram1D(hname8, htitle8, 42, -PI/2., PI/2.);
-      
+   
       /// @todo Need better title
       _histAlpaIdeal = bookHistogram1D("alphaIdeal", 42, -PI/2., PI/2.);
-      
+   
       /// @todo Need better title
       _histAlpaCDF = bookHistogram1D("alphaCDF", 42, -PI/2., PI/2.);
-      
+   
       /// @todo Need better title
       _histR23Ideal = bookHistogram1D("R23Ideal", 50, 0., 5.);
-      
+   
       /// @todo Need better title
       _histR23CDF = bookHistogram1D("R23CDF", 50, 0., 5.);
-      
+   
       /// @todo Need better title
       _histJet3etaIdeal = bookHistogram1D("Jet3etaIdeal", 42, -4., 4.);
-      
+   
       /// @todo Need better title
       _histJet3etaCDF = bookHistogram1D("Jet3etaCDF", 42, -4., 4.);
     }
-    
-    
-    
+ 
+ 
+ 
     // Do the analysis
     void analyze(const Event & event) {
       const Jets jets = applyProjection<FastJets>(event, "ConeJets").jetsByPt();
       getLog() << Log::DEBUG << "Jet multiplicity before any cuts = " << jets.size() << endl;
-      
+   
       // Find vertex and check  that its z-component is < 60 cm from the nominal IP
       const PVertex& pv = applyProjection<PVertex>(event, "PV");
       if (fabs(pv.position().z())/mm > _pvzmax) {
         vetoEvent;
       }
-      
+   
       // Check there isn't too much missing Et
       const TotalVisibleMomentum& caloMissEt = applyProjection<TotalVisibleMomentum>(event, "CalMET");
-      getLog() << Log::DEBUG << "Missing pT = " << caloMissEt.momentum().pT()/GeV << " GeV" << endl;      
+      getLog() << Log::DEBUG << "Missing pT = " << caloMissEt.momentum().pT()/GeV << " GeV" << endl;
       if ((caloMissEt.momentum().pT()/GeV) / sqrt(caloMissEt.scalarET()/GeV) > _metsetmax) {
         vetoEvent;
       }
-      
+   
       // Check jet requirements
       if (jets.size() < 3) vetoEvent;
       if (jets[0].momentum().pT() < 100*GeV) vetoEvent;
-      
+   
       // More jet 1,2,3 checks
       FourMomentum pj1(jets[0].momentum()), pj2(jets[1].momentum()), pj3(jets[2].momentum());
       if (fabs(pj1.eta()) > _etamax || fabs(pj2.eta()) > _etamax) vetoEvent;
-      getLog() << Log::DEBUG << "Jet 1 & 2 eta, pT requirements fulfilled" << endl;          
-      
+      getLog() << Log::DEBUG << "Jet 1 & 2 eta, pT requirements fulfilled" << endl;
+   
       if (deltaPhi(pj1.phi(), pj2.phi()) > _phimin) vetoEvent;
       getLog() << Log::DEBUG << "Jet 1 & 2 phi requirement fulfilled" << endl;
-      
+   
       const double weight = event.weight();
       _histJet1Et->fill(pj1.pT(), weight);
       _histJet2Et->fill(pj2.pT(), weight);
       _histR23->fill(deltaR(pj2, pj3), weight);
       _histJet3eta->fill(pj3.eta(), weight);
-      
+   
       // Next cut only required for alpha studies
       if (pj3.pT() < _3rdJetPt) vetoEvent;
-      getLog() << Log::DEBUG << "3rd jet passes alpha histo pT cut" << endl;      
+      getLog() << Log::DEBUG << "3rd jet passes alpha histo pT cut" << endl;
       _events3jPassed += weight;
-      
+   
       // Calc and plot alpha
-      const double dPhi = deltaPhi(pj3.phi(), pj2.phi());    
+      const double dPhi = deltaPhi(pj3.phi(), pj2.phi());
       const double dH = sign(pj2.eta()) * (pj3.eta() - pj2.eta());
       const double alpha = atan(dH/dPhi);
       _histAlpha->fill(alpha, weight);
     }
-    
-    
+ 
+ 
     /// Finalize
-    void finalize() { 
+    void finalize() {
       /// @todo Apply correction
       // double a, b, c, erra, errb, errc;
       // for (int ibin = 0;  ibin < _histAlpha->getNbins(); ++ibin) {
@@ -149,11 +149,11 @@
       // c = _histAlpaCDF->GetBinContent(ibin);
       // errc = _histAlpaCDF->GetBinError(ibin);
       // _histAlpha->SetBinContent(ibin, b/c);
-      // _histAlpha->SetBinError(ibin, sqrt(sqr(b)/sqr(c)*sqr(erra) + sqr(a)/sqr(c)*sqr(errb) + 
+      // _histAlpha->SetBinError(ibin, sqrt(sqr(b)/sqr(c)*sqr(erra) + sqr(a)/sqr(c)*sqr(errb) +
       // sqr(a*b/(sqr(c)))*sqr(errc) ) );
       // }
       /// @todo Same correction to be applied for _hisR23 and _histJet3eta histograms
-      
+   
       getLog() << Log::INFO << "Cross-section = " << crossSection()/picobarn << " pb" << endl;
       normalize(_histJet1Et);
       normalize(_histJet2Et);
@@ -161,7 +161,7 @@
       normalize(_histJet3eta);
       normalize(_histAlpha);
     }
- 
+
     //@}
 
   private:
@@ -216,8 +216,8 @@
     //@}
 
   };
-   
-    
+
+ 
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_1994_S2952106> plugin_CDF_1994_S2952106;

Modified: trunk/src/Analyses/CDF_1996_S3108457.cc
==============================================================================
--- trunk/src/Analyses/CDF_1996_S3108457.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1996_S3108457.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_1996_S3108457()
-      : Analysis("CDF_1996_S3108457") 
+      : Analysis("CDF_1996_S3108457")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
@@ -36,7 +36,7 @@
       /// Initialise and register projections here
       const FinalState fs(-4.2, 4.2);
       addProjection(FastJets(fs, FastJets::CDFJETCLU, 0.7), "Jets");
-      
+   
 
       /// Book histograms here, e.g.:
       for (size_t i=0; i<5; ++i) {
@@ -65,29 +65,29 @@
         }
       }
       /// @todo include gaussian jet energy resolution smearing?
-      
+   
       if (jets.size() < 2 || jets.size() > 6) {
         vetoEvent;
       }
-      
+   
       if (sumEt < 420.0*GeV) {
         vetoEvent;
       }
-      
+   
       LorentzTransform cms_boost(-jetsystem.boostVector());
       FourMomentum jet0boosted(cms_boost.transform(jets[0].momentum()));
-      
+   
       double mass = jetsystem.mass();
       double costheta0 = fabs(cos(jet0boosted.theta()));
-      
+   
       if (costheta0 < 2.0/3.0) {
         _h_m[jets.size()-2]->fill(mass, weight);
       }
-      
+   
       if (mass > 600.0*GeV) {
         _h_costheta[jets.size()-2]->fill(costheta0, weight);
       }
-      
+   
       if (costheta0 < 2.0/3.0 && mass > 600.0*GeV) {
         foreach (const Jet jet, jets) {
           _h_pT[jets.size()-2]->fill(jet.momentum().pT(), weight);
@@ -98,14 +98,14 @@
 
     /// Normalise histograms etc., after the run
     void finalize() {
-      
+   
       /// Normalise, scale and otherwise manipulate histograms here
       for (size_t i=0; i<5; ++i) {
         normalize(_h_m[i], 40.0);
         normalize(_h_costheta[i], 2.0);
         normalize(_h_pT[i], 20.0);
       }
-      
+   
     }
 
     //@}
@@ -119,7 +119,7 @@
     AIDA::IHistogram1D *_h_m[5];
     AIDA::IHistogram1D *_h_costheta[5];
     AIDA::IHistogram1D *_h_pT[5];
-    
+ 
     //@}
 
   };

Modified: trunk/src/Analyses/CDF_1996_S3349578.cc
==============================================================================
--- trunk/src/Analyses/CDF_1996_S3349578.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1996_S3349578.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_1996_S3349578()
-      : Analysis("CDF_1996_S3349578") 
+      : Analysis("CDF_1996_S3349578")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
@@ -46,7 +46,7 @@
       _h_3_f3 = bookHistogram1D(14, 1, 1);
       _h_3_f4 = bookHistogram1D(14, 1, 2);
       _h_3_f5 = bookHistogram1D(14, 1, 3);
-      
+   
       _h_4_mNJ = bookHistogram1D(1, 1, 2);
       _h_4_X3 = bookHistogram1D(4, 1, 1);
       _h_4_X4 = bookHistogram1D(5, 1, 1);
@@ -59,7 +59,7 @@
       _h_4_psiAB = bookHistogram1D(19, 1, 1);
       _h_4_fA = bookHistogram1D(21, 1, 1);
       _h_4_fB = bookHistogram1D(21, 1, 2);
-      
+   
       _h_5_mNJ = bookHistogram1D(1, 1, 3);
       _h_5_X3 = bookHistogram1D(6, 1, 1);
       _h_5_X4 = bookHistogram1D(7, 1, 1);
@@ -76,7 +76,7 @@
       _h_5_fB = bookHistogram1D(23, 1, 1);
       _h_5_fC = bookHistogram1D(24, 1, 1);
       _h_5_fD = bookHistogram1D(25, 1, 1);
-      
+   
     }
 
 
@@ -97,20 +97,20 @@
         }
       }
       /// @todo include gaussian jet energy resolution smearing?
-      
+   
       if (jets.size() < 3) {
         vetoEvent;
       }
-      
+   
       if (sumEt < 420.0*GeV) {
         vetoEvent;
       }
-      
+   
       if (jets.size() > 2) _threeJetAnalysis(jets, weight);
       if (jets.size() > 3) _fourJetAnalysis(jets, weight);
       if (jets.size() > 4) _fiveJetAnalysis(jets, weight);
     }
-    
+ 
     void _threeJetAnalysis(const Jets& jets, const double& weight) {
       getLog() << Log::DEBUG << "3 jet analysis" << std::endl;
       FourMomentum jjj(jets[0].momentum()+jets[1].momentum()+jets[2].momentum());
@@ -118,7 +118,7 @@
       if (m3J<600*GeV) {
         return;
       }
-    
+ 
       LorentzTransform cms_boost(-jjj.boostVector());
       vector<FourMomentum> jets_boosted;
       foreach (Jet jet, jets) {
@@ -128,18 +128,18 @@
       FourMomentum p3(jets_boosted[0]);
       FourMomentum p4(jets_boosted[1]);
       FourMomentum p5(jets_boosted[2]);
-      
+   
       double costheta3 = cos(p3.theta());
       if (fabs(costheta3)>0.6) {
         return;
       }
-      
+   
       double X3 = 2.0*p3.E()/m3J;
       if (X3>0.9) {
         return;
       }
-      
-      
+   
+   
       // fill histograms
       const double X4 = 2.0*p4.E()/m3J;
       Vector3 beam1(0.0, 0.0, 1.0);
@@ -149,7 +149,7 @@
       const double f3 = p3.mass()/m3J;
       const double f4 = p4.mass()/m3J;
       const double f5 = p5.mass()/m3J;
-      
+   
       _h_3_mNJ->fill(m3J, weight);
       _h_3_X3->fill(X3, weight);
       _h_3_X4->fill(X4, weight);
@@ -158,7 +158,7 @@
       _h_3_f3->fill(f3, weight);
       _h_3_f4->fill(f4, weight);
       _h_3_f5->fill(f5, weight);
-      
+   
     }
 
     void _fourJetAnalysis(const Jets& jets, const double& weight) {
@@ -171,7 +171,7 @@
       }
       const double m4J = jjjj.mass();
       if (m4J < 650*GeV) return;
-      
+   
       FourMomentum pA, pB;
       vector<FourMomentum> jetmoms3(_reduce(jetmoms, pA, pB));
       LorentzTransform cms_boost(-jjjj.boostVector());
@@ -181,23 +181,23 @@
       }
       pA = cms_boost.transform(pA);
       pB = cms_boost.transform(pB);
-      
+   
       sort(jetmoms3_boosted.begin(), jetmoms3_boosted.end(), FourMomentum::byEDescending());
       if (pB.E()>pA.E()) std::swap(pA, pB);
       FourMomentum p3(jetmoms3_boosted[0]);
       FourMomentum p4(jetmoms3_boosted[1]);
       FourMomentum p5(jetmoms3_boosted[2]);
-      
+   
       const double costheta3 = cos(p3.theta());
       if (fabs(costheta3)>0.8) {
         return;
       }
-      
+   
       const double X3 = 2.0*p3.E()/m4J;
       if (X3>0.9) {
         return;
       }
-      
+   
       // fill histograms
       const double X4 = 2.0*p4.E()/m4J;
       Vector3 beam1(0.0, 0.0, 1.0);
@@ -214,7 +214,7 @@
       Vector3 pABxp1 = pAB.vector3().cross(beam1);
       Vector3 pAxpB = pA.vector3().cross(pB.vector3());
       const double cospsiAB = pAxpB.dot(pABxp1)/pAxpB.mod()/pABxp1.mod();
-      
+   
       _h_4_mNJ->fill(m4J, weight);
       _h_4_X3->fill(X3, weight);
       _h_4_X4->fill(X4, weight);
@@ -228,8 +228,8 @@
       _h_4_fA->fill(fA, weight);
       _h_4_fB->fill(fB, weight);
     }
-      
-      
+   
+   
     void _fiveJetAnalysis(const Jets& jets, const double& weight) {
       getLog() << Log::DEBUG << "5 jet analysis" << std::endl;
       FourMomentum jjjjj(0.0, 0.0, 0.0, 0.0);
@@ -240,11 +240,11 @@
       }
       const double m5J = jjjjj.mass();
       if (m5J < 750*GeV) return;
-      
+   
       FourMomentum pA, pB, pC, pD;
       vector<FourMomentum> jetmoms4(_reduce(jetmoms, pC, pD));
       vector<FourMomentum> jetmoms3(_reduce(jetmoms4, pA, pB));
-      
+   
       LorentzTransform cms_boost(-jjjjj.boostVector());
       vector<FourMomentum> jetmoms3_boosted;
       foreach (FourMomentum mom, jetmoms3) {
@@ -254,14 +254,14 @@
       pB = cms_boost.transform(pB);
       pC = cms_boost.transform(pC);
       pD = cms_boost.transform(pD);
-      
+   
       sort(jetmoms3_boosted.begin(), jetmoms3_boosted.end(), FourMomentum::byEDescending());
       if (pB.E()>pA.E()) std::swap(pA, pB);
       if (pD.E()>pC.E()) std::swap(pD, pC);
       FourMomentum p3(jetmoms3_boosted[0]);
       FourMomentum p4(jetmoms3_boosted[1]);
       FourMomentum p5(jetmoms3_boosted[2]);
-      
+   
       // fill histograms
       const double costheta3 = cos(p3.theta());
       const double X3 = 2.0*p3.E()/m5J;
@@ -287,7 +287,7 @@
       Vector3 pCDxp1 = pCD.vector3().cross(beam1);
       Vector3 pCxpD = pC.vector3().cross(pD.vector3());
       const double cospsiCD = pCxpD.dot(pCDxp1)/pCxpD.mod()/pCDxp1.mod();
-      
+   
       _h_5_mNJ->fill(m5J, weight);
       _h_5_X3->fill(X3, weight);
       _h_5_X4->fill(X4, weight);
@@ -305,11 +305,11 @@
       _h_5_fC->fill(fC, weight);
       _h_5_fD->fill(fD, weight);
     }
-      
-      
+   
+   
     /// Normalise histograms etc., after the run
     void finalize() {
-      
+   
       /// Normalise, scale and otherwise manipulate histograms here
       normalize(_h_3_mNJ, 1.0);
       normalize(_h_3_X3, 1.0);
@@ -319,7 +319,7 @@
       normalize(_h_3_f3, 1.0);
       normalize(_h_3_f4, 1.0);
       normalize(_h_3_f5, 1.0);
-      
+   
       normalize(_h_4_mNJ, 1.0);
       normalize(_h_4_X3, 1.0);
       normalize(_h_4_X4, 1.0);
@@ -332,7 +332,7 @@
       normalize(_h_4_psiAB, 1.0);
       normalize(_h_4_fA, 1.0);
       normalize(_h_4_fB, 1.0);
-      
+   
       normalize(_h_5_mNJ, 1.0);
       normalize(_h_5_X3, 1.0);
       normalize(_h_5_X4, 1.0);
@@ -349,7 +349,7 @@
       normalize(_h_5_fB, 1.0);
       normalize(_h_5_fC, 1.0);
       normalize(_h_5_fD, 1.0);
-      
+   
     }
 
     //@}
@@ -381,7 +381,7 @@
       combined2 = jets[idx2];
       return newjets;
     }
-    
+ 
 
   private:
 
@@ -395,7 +395,7 @@
     AIDA::IHistogram1D *_h_3_f3;
     AIDA::IHistogram1D *_h_3_f4;
     AIDA::IHistogram1D *_h_3_f5;
-    
+ 
     AIDA::IHistogram1D *_h_4_mNJ;
     AIDA::IHistogram1D *_h_4_X3;
     AIDA::IHistogram1D *_h_4_X4;
@@ -408,7 +408,7 @@
     AIDA::IHistogram1D *_h_4_psiAB;
     AIDA::IHistogram1D *_h_4_fA;
     AIDA::IHistogram1D *_h_4_fB;
-    
+ 
     AIDA::IHistogram1D *_h_5_mNJ;
     AIDA::IHistogram1D *_h_5_X3;
     AIDA::IHistogram1D *_h_5_X4;

Modified: trunk/src/Analyses/CDF_1996_S3418421.cc
==============================================================================
--- trunk/src/Analyses/CDF_1996_S3418421.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1996_S3418421.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -40,7 +40,7 @@
       _h_chi.addHistogram(400.0, 517.0, bookHistogram1D(1, 1, 3));
       _h_chi.addHistogram(517.0, 625.0, bookHistogram1D(1, 1, 4));
       _h_chi.addHistogram(625.0, 1800.0, bookHistogram1D(1, 1, 5));
-      
+   
       _h_ratio = bookHistogram1D(2,1,1);
       _chi_above_25.resize(_h_ratio->axis().bins());
       _chi_below_25.resize(_h_ratio->axis().bins());
@@ -63,10 +63,10 @@
       if (eta2>2.0 || eta1>2.0 || chi>5.0) {
         vetoEvent;
       }
-      
+   
       double m = FourMomentum(jet1+jet2).mass();
       _h_chi.fill(m, chi, weight);
-      
+   
       // fill ratio counter
       int bin = _h_ratio->coordToIndex(m);
       _nevt++;
@@ -81,7 +81,7 @@
 
     /// Normalise histograms etc., after the run
     void finalize() {
-      
+   
       foreach (AIDA::IHistogram1D* hist, _h_chi.getHistograms()) {
         // because HepData contains 100/N instead of 1/N this is 100 in the aida
         normalize(hist, 100.0);

Modified: trunk/src/Analyses/CDF_1998_S3618439.cc
==============================================================================
--- trunk/src/Analyses/CDF_1998_S3618439.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_1998_S3618439.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_1998_S3618439()
-      : Analysis("CDF_1998_S3618439") 
+      : Analysis("CDF_1998_S3618439")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);

Modified: trunk/src/Analyses/CDF_2000_S4155203.cc
==============================================================================
--- trunk/src/Analyses/CDF_2000_S4155203.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2000_S4155203.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,12 +12,12 @@
   /*
    * @brief CDF Run I Z pT in Drell-Yan events
    * @author Hendrik Hoeth
-   */ 
+   */
   class CDF_2000_S4155203 : public Analysis {
   public:
 
     /// Constructor
-    CDF_2000_S4155203() 
+    CDF_2000_S4155203()
       : Analysis("CDF_2000_S4155203")
     {
       setBeams(PROTON, ANTIPROTON);
@@ -26,7 +26,7 @@
 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       // Set up projections
       ZFinder zfinder(FinalState(), ELECTRON, 66.0*GeV, 116.0*GeV, 0.2);
@@ -35,8 +35,8 @@
       // Book histogram
       _hist_zpt = bookHistogram1D(1, 1, 1);
     }
-    
-    
+ 
+ 
     /// Do the analysis
     void analyze(const Event& e) {
       const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
@@ -44,20 +44,20 @@
         getLog() << Log::DEBUG << "No unique e+e- pair found" << endl;
         vetoEvent;
       }
-      
-      FourMomentum pZ = zfinder.particles()[0].momentum();    
+   
+      FourMomentum pZ = zfinder.particles()[0].momentum();
       getLog() << Log::DEBUG << "Dilepton mass = " << pZ.mass()/GeV << " GeV"  << endl;
       getLog() << Log::DEBUG << "Dilepton pT   = " << pZ.pT()/GeV << " GeV" << endl;
       _hist_zpt->fill(pZ.pT()/GeV, e.weight());
     }
-    
-    
+ 
+ 
     void finalize() {
       // Normalize to the experimental cross-section
       /// @todo Get norm from generator cross-section
       normalize(_hist_zpt, 247.4);
     }
-    
+ 
     //@}
 
 
@@ -68,7 +68,7 @@
 
   };
 
-    
+ 
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2000_S4155203> plugin_CDF_2000_S4155203;

Modified: trunk/src/Analyses/CDF_2000_S4266730.cc
==============================================================================
--- trunk/src/Analyses/CDF_2000_S4266730.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2000_S4266730.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_2000_S4266730()
-      : Analysis("CDF_2000_S4266730") 
+      : Analysis("CDF_2000_S4266730")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
@@ -63,7 +63,7 @@
         vetoEvent;
       }
       _h_mjj->fill(mjj, weight);
-      
+   
     }
 
 

Modified: trunk/src/Analyses/CDF_2001_S4517016.cc
==============================================================================
--- trunk/src/Analyses/CDF_2001_S4517016.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2001_S4517016.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,7 +17,7 @@
 
     /// Constructor
     CDF_2001_S4517016()
-      : Analysis("CDF_2001_S4517016") 
+      : Analysis("CDF_2001_S4517016")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);

Modified: trunk/src/Analyses/CDF_2001_S4563131.cc
==============================================================================
--- trunk/src/Analyses/CDF_2001_S4563131.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2001_S4563131.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_2001_S4563131()
-      : Analysis("CDF_2001_S4563131") 
+      : Analysis("CDF_2001_S4563131")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
@@ -51,7 +51,7 @@
           _h_ET->fill(jet.momentum().Et(), weight);
         }
       }
-      
+   
     }
 
 

Modified: trunk/src/Analyses/CDF_2001_S4751469.cc
==============================================================================
--- trunk/src/Analyses/CDF_2001_S4751469.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2001_S4751469.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,20 +18,20 @@
 
   /* @brief "Field-Stuart" CDF Run I underlying event analysis
    * @author Andy Buckley
-   * 
-   * 
+   *
+   *
    * @par Run conditions
-   * 
+   *
    * @arg \f$ \sqrt{s} = \f$ 1800 GeV
    * @arg Run with generic QCD events.
    * @arg Several \f$ p_\perp^\text{min} \f$ cutoffs are probably required to fill the profile histograms:
    *   @arg \f$ p_\perp^\text{min} = \f$ 0 (min bias), 10, 20 GeV
-   * 
-   */ 
+   *
+   */
   class CDF_2001_S4751469 : public Analysis {
   public:
 
-    /// Constructor: cuts on final state are \f$ -1 < \eta < 1 \f$ 
+    /// Constructor: cuts on final state are \f$ -1 < \eta < 1 \f$
     /// and \f$ p_T > 0.5 \f$ GeV.
     CDF_2001_S4751469()
       : Analysis("CDF_2001_S4751469"),
@@ -40,11 +40,11 @@
     {
       setBeams(PROTON, ANTIPROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
-    
+ 
     // Book histograms
     void init() {
       addProjection(TriggerCDFRun0Run1(), "Trigger");
@@ -53,53 +53,53 @@
       const LossyFinalState lfs(cfs, 0.08);
       addProjection(lfs, "FS");
       addProjection(FastJets(lfs, FastJets::TRACKJET, 0.7), "TrackJet");
-      
-      _numvsDeltaPhi2 =  bookProfile1D(1, 1, 1);  
-      _numvsDeltaPhi5 =  bookProfile1D(1, 1, 2);  
-      _numvsDeltaPhi30 = bookProfile1D(1, 1, 3);  
-      _pTvsDeltaPhi2 =   bookProfile1D(2, 1, 1);  
-      _pTvsDeltaPhi5 =   bookProfile1D(2, 1, 2);  
+   
+      _numvsDeltaPhi2 =  bookProfile1D(1, 1, 1);
+      _numvsDeltaPhi5 =  bookProfile1D(1, 1, 2);
+      _numvsDeltaPhi30 = bookProfile1D(1, 1, 3);
+      _pTvsDeltaPhi2 =   bookProfile1D(2, 1, 1);
+      _pTvsDeltaPhi5 =   bookProfile1D(2, 1, 2);
       _pTvsDeltaPhi30 =  bookProfile1D(2, 1, 3);
-      
+   
       _numTowardMB = bookProfile1D(3, 1, 1);
       _numTransMB = bookProfile1D(3, 1, 2);
       _numAwayMB = bookProfile1D(3, 1, 3);
       _numTowardJ20 = bookProfile1D(4, 1, 1);
       _numTransJ20 = bookProfile1D(4, 1, 2);
       _numAwayJ20 = bookProfile1D(4, 1, 3);
-      
+   
       _ptsumTowardMB = bookProfile1D(5, 1, 1);
       _ptsumTransMB = bookProfile1D(5, 1, 2);
       _ptsumAwayMB = bookProfile1D(5, 1, 3);
       _ptsumTowardJ20 = bookProfile1D(6, 1, 1);
       _ptsumTransJ20 = bookProfile1D(6, 1, 2);
       _ptsumAwayJ20 = bookProfile1D(6, 1, 3);
-      
+   
       _ptTrans2 = bookHistogram1D(7, 1, 1);
       _ptTrans5 = bookHistogram1D(7, 1, 2);
       _ptTrans30 = bookHistogram1D(7, 1, 3);
     }
-    
+ 
 
     /// Do the analysis
     void analyze(const Event& event) {
       // Trigger
       const bool trigger = applyProjection<TriggerCDFRun0Run1>(event, "Trigger").minBiasDecision();
       if (!trigger) vetoEvent;
-      
+   
       // Analyse, with pT > 0.5 GeV AND |eta| < 1
       const JetAlg& tj = applyProjection<JetAlg>(event, "TrackJet");
-      
+   
       // Get jets, sorted by pT
       const Jets jets = tj.jetsByPt();
-      if (jets.empty()) { 
-        vetoEvent; 
+      if (jets.empty()) {
+        vetoEvent;
       }
 
       Jet leadingJet = jets.front();
       const double phiLead = leadingJet.ptWeightedPhi();
       const double ptLead = leadingJet.ptSum();
-      
+   
       // Cut on highest pT jet: combined 0.5 GeV < pT(lead) < 50 GeV
       if (ptLead/GeV < 0.5) vetoEvent;
       if (ptLead/GeV > 50.0) vetoEvent;
@@ -111,13 +111,13 @@
       if (ptLead/GeV > 2.0) {
         _sumWeightsPtLead2 += weight;
       }
-      if (ptLead/GeV > 5.0) { 
+      if (ptLead/GeV > 5.0) {
         _sumWeightsPtLead5 += weight;
       }
       if (ptLead/GeV > 30.0) {
         _sumWeightsPtLead30 += weight;
       }
-      
+   
       // Run over tracks
       double ptSumToward(0.0), ptSumAway(0.0), ptSumTrans(0.0);
       size_t numToward(0), numTrans(0), numAway(0);
@@ -132,14 +132,14 @@
         foreach (const FourMomentum& p, j) {
           // Calculate Delta(phi) from leading jet
           const double dPhi = deltaPhi(p.azimuthalAngle(), phiLead);
-          
+       
           // Get pT sum and multiplicity values for each region
           // (each is 1 number for each region per event)
           /// @todo Include event weight factor?
           if (dPhi < PI/3.0) {
             ptSumToward += p.pT();
-            ++numToward;            
-          } 
+            ++numToward;
+          }
           else if (dPhi < 2*PI/3.0) {
             ptSumTrans += p.pT();
             ++numTrans;
@@ -148,7 +148,7 @@
               _ptTrans2->fill(p.pT()/GeV, weight);
               _totalNumTrans2 += weight;
             }
-            if (ptLead/GeV > 5.0) { 
+            if (ptLead/GeV > 5.0) {
               _ptTrans5->fill(p.pT()/GeV, weight);
               _totalNumTrans5 += weight;
             }
@@ -161,14 +161,14 @@
             ptSumAway += p.pT();
             ++numAway;
           }
-          
+       
           // Fill tmp histos to bin event's track Nch & pT in dphi
           const double dPhideg = 180*dPhi/PI;
           if (ptLead/GeV > 2.0) {
             hist_num_dphi_2.fill(dPhideg, 1);
             hist_pt_dphi_2.fill (dPhideg, p.pT()/GeV);
           }
-          if (ptLead/GeV > 5.0) { 
+          if (ptLead/GeV > 5.0) {
             hist_num_dphi_5.fill(dPhideg, 1);
             hist_pt_dphi_5.fill (dPhideg, p.pT()/GeV);
           }
@@ -184,7 +184,7 @@
       for (int i= 0; i < 50; i++) {
         if (ptLead/GeV > 2.0) {
           _numvsDeltaPhi2->fill(hist_num_dphi_2.binMean(i), hist_num_dphi_2.binHeight(i), weight);
-          _pTvsDeltaPhi2->fill(hist_pt_dphi_2.binMean(i), hist_pt_dphi_2.binHeight(i), weight); 
+          _pTvsDeltaPhi2->fill(hist_pt_dphi_2.binMean(i), hist_pt_dphi_2.binHeight(i), weight);
         }
         if (ptLead/GeV > 5.0) {
           _numvsDeltaPhi5->fill(hist_num_dphi_5.binMean(i), hist_num_dphi_5.binHeight(i), weight);
@@ -195,53 +195,53 @@
           _pTvsDeltaPhi30->fill(hist_pt_dphi_30.binMean(i), hist_pt_dphi_30.binHeight(i), weight);
         }
       }
-      
+   
       // Log some event details about pT
-      getLog() << Log::DEBUG 
+      getLog() << Log::DEBUG
                << "pT [lead; twd, away, trans] = ["
-               << ptLead << "; " 
-               << ptSumToward << ", " 
-               << ptSumAway << ", " 
-               << ptSumTrans << "]" 
+               << ptLead << "; "
+               << ptSumToward << ", "
+               << ptSumAway << ", "
+               << ptSumTrans << "]"
                << endl;
 
-      // Update the pT profile histograms      
+      // Update the pT profile histograms
       _ptsumTowardMB->fill(ptLead/GeV, ptSumToward/GeV, weight);
       _ptsumTowardJ20->fill(ptLead/GeV, ptSumToward/GeV, weight);
-      
+   
       _ptsumTransMB->fill(ptLead/GeV, ptSumTrans/GeV, weight);
       _ptsumTransJ20->fill(ptLead/GeV, ptSumTrans/GeV, weight);
-      
+   
       _ptsumAwayMB->fill(ptLead/GeV, ptSumAway/GeV, weight);
       _ptsumAwayJ20->fill(ptLead/GeV, ptSumAway/GeV, weight);
-      
+   
       // Log some event details about Nch
-      getLog() << Log::DEBUG 
+      getLog() << Log::DEBUG
                << "N [twd, away, trans] = ["
-               << numToward << ", " 
-               << numTrans << ", " 
-               << numAway << "]" 
+               << numToward << ", "
+               << numTrans << ", "
+               << numAway << "]"
                << endl;
-      
+   
       // Update the N_jet profile histograms
       _numTowardMB->fill(ptLead/GeV, numToward, weight);
       _numTowardJ20->fill(ptLead/GeV, numToward, weight);
-      
+   
       _numTransMB->fill(ptLead/GeV, numTrans, weight);
       _numTransJ20->fill(ptLead/GeV, numTrans, weight);
-      
+   
       _numAwayMB->fill(ptLead/GeV, numAway, weight);
       _numAwayJ20->fill(ptLead/GeV, numAway, weight);
     }
-    
-    
+ 
+ 
     /// Normalize histos
     void finalize() {
       normalize(_ptTrans2, _totalNumTrans2 / _sumWeightsPtLead2);
       normalize(_ptTrans5, _totalNumTrans5 / _sumWeightsPtLead5);
       normalize(_ptTrans30, _totalNumTrans30 / _sumWeightsPtLead30);
     }
-    
+ 
     //@}
 
 
@@ -283,7 +283,7 @@
 
   };
 
-    
+ 
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2001_S4751469> plugin_CDF_2001_S4751469;

Modified: trunk/src/Analyses/CDF_2002_S4796047.cc
==============================================================================
--- trunk/src/Analyses/CDF_2002_S4796047.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2002_S4796047.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,22 +11,22 @@
   /*
    * @brief CDF Run I charged multiplicity measurement
    * @author Hendrik Hoeth
-   * 
+   *
    * This analysis measures the charged multiplicity distribution
    * in minimum bias events at two different center-of-mass energies:
    * \f$ \sqrt{s} = \f$ 630 and 1800 GeV.
-   * 
+   *
    * Particles with c*tau > 10 mm are considered stable, i.e. they
    * are reconstructed and their decay products removed. Selection
    * cuts are |eta|<1 and pT>0.4 GeV.
-   * 
-   * 
+   *
+   *
    * @par Run conditions
-   * 
+   *
    * @arg Two different beam energies: \f$ \sqrt{s} = \$f 630 & 1800 GeV
    * @arg Run with generic QCD events.
    * @arg Set particles with c*tau > 10 mm stable
-   * 
+   *
    */
   class CDF_2002_S4796047 : public Analysis {
   public:
@@ -34,14 +34,14 @@
     /// Constructor
     CDF_2002_S4796047()
       : Analysis("CDF_2002_S4796047")
-    { 
+    {
       setBeams(PROTON, ANTIPROTON);
     }
 
 
     /// @name Analysis methods
     //@{
-    
+ 
     /// Book projections and histograms
     void init() {
       addProjection(TriggerCDFRun0Run1(), "Trigger");
@@ -54,8 +54,8 @@
       _hist_pt_vs_multiplicity_630  = bookProfile1D(3, 1, 1);
       _hist_pt_vs_multiplicity_1800 = bookProfile1D(4, 1, 1);
     }
-    
-    
+ 
+ 
     /// Do the analysis
     void analyze(const Event& evt) {
       // Trigger
@@ -71,7 +71,7 @@
       // Fill histos of charged multiplicity distributions
       if (fuzzyEquals(sqrtS, 630/GeV)) {
         _hist_multiplicity_630->fill(numParticles, weight);
-      } 
+      }
       else if (fuzzyEquals(sqrtS, 1800/GeV)) {
         _hist_multiplicity_1800->fill(numParticles, weight);
       }
@@ -86,9 +86,9 @@
           _hist_pt_vs_multiplicity_1800->fill(numParticles, pT/GeV, weight);
         }
       }
-      
+   
     }
-    
+ 
 
     void finalize() {
       /// @todo Get cross-section from the generator

Modified: trunk/src/Analyses/CDF_2004_S5839831.cc
==============================================================================
--- trunk/src/Analyses/CDF_2004_S5839831.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2004_S5839831.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -20,10 +20,10 @@
   class CDF_2004_S5839831 : public Analysis {
   public:
 
-    /// Constructor: cuts on charged final state are \f$ -1 < \eta < 1 \f$ 
+    /// Constructor: cuts on charged final state are \f$ -1 < \eta < 1 \f$
     /// and \f$ p_T > 0.4 \f$ GeV.
-    CDF_2004_S5839831() 
-      : Analysis("CDF_2004_S5839831") 
+    CDF_2004_S5839831()
+      : Analysis("CDF_2004_S5839831")
     {
       setBeams(PROTON, ANTIPROTON);
     }
@@ -36,23 +36,23 @@
       unsigned int numMax, numMin;
       double ptMax, ptMin, ptDiff;
     };
-    
-    
-    ConesInfo _calcTransCones(const double etaLead, const double phiLead, 
+ 
+ 
+    ConesInfo _calcTransCones(const double etaLead, const double phiLead,
                               const ParticleVector& tracks) {
       const double phiTransPlus = mapAngle0To2Pi(phiLead + PI/2.0);
       const double phiTransMinus = mapAngle0To2Pi(phiLead - PI/2.0);
-      getLog() << Log::DEBUG << "phi_lead = " << phiLead 
-               << " -> trans = (" << phiTransPlus 
+      getLog() << Log::DEBUG << "phi_lead = " << phiLead
+               << " -> trans = (" << phiTransPlus
                << ", " << phiTransMinus << ")" << endl;
-      
+   
       unsigned int numPlus(0), numMinus(0);
       double ptPlus(0), ptMinus(0);
       // Run over all charged tracks
       foreach (const Particle& t, tracks) {
         FourMomentum trackMom = t.momentum();
         const double pt = trackMom.pT();
-        
+     
         // Find if track mom is in either transverse cone
         if (deltaR(trackMom, etaLead, phiTransPlus) < 0.7) {
           ptPlus += pt;
@@ -62,7 +62,7 @@
           numMinus += 1;
         }
       }
-      
+   
       ConesInfo rtn;
       // Assign N_{min,max} from N_{plus,minus}
       rtn.numMax = (ptPlus >= ptMinus) ? numPlus : numMinus;
@@ -71,17 +71,17 @@
       rtn.ptMax = (ptPlus >= ptMinus) ? ptPlus : ptMinus;
       rtn.ptMin = (ptPlus >= ptMinus) ? ptMinus : ptPlus;
       rtn.ptDiff = fabs(rtn.ptMax - rtn.ptMin);
-      
-      getLog() << Log::DEBUG << "Min cone has " << rtn.numMin << " tracks -> " 
+   
+      getLog() << Log::DEBUG << "Min cone has " << rtn.numMin << " tracks -> "
                << "pT_min = " << rtn.ptMin/GeV << " GeV" << endl;
-      getLog() << Log::DEBUG << "Max cone has " << rtn.numMax << " tracks -> " 
+      getLog() << Log::DEBUG << "Max cone has " << rtn.numMax << " tracks -> "
                << "pT_max = " << rtn.ptMax/GeV << " GeV" << endl;
-      
+   
       return rtn;
     }
-    
-    
-    ConesInfo _calcTransCones(const FourMomentum& leadvec, 
+ 
+ 
+    ConesInfo _calcTransCones(const FourMomentum& leadvec,
                               const ParticleVector& tracks) {
       const double etaLead = leadvec.pseudorapidity();
       const double phiLead = leadvec.azimuthalAngle();
@@ -91,7 +91,7 @@
 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       // Set up projections
       addProjection(TriggerCDFRun0Run1(), "Trigger");
@@ -111,7 +111,7 @@
 
       // Book histograms
       _pt90MaxAvg1800 = bookProfile1D(1, 1, 1);
-      _pt90MinAvg1800 = bookProfile1D(1, 1, 2); 
+      _pt90MinAvg1800 = bookProfile1D(1, 1, 2);
       _pt90Max1800 = bookProfile1D(2, 1, 1);
       _pt90Min1800 = bookProfile1D(2, 1, 2);
       _pt90Diff1800 = bookProfile1D(2, 1, 3);
@@ -119,25 +119,25 @@
       _num90Min1800 = bookProfile1D(4, 1, 2);
       _pTSum1800_2Jet = bookProfile1D(7, 1, 1);
       _pTSum1800_3Jet = bookProfile1D(7, 1, 2);
-      _pt90Max630 = bookProfile1D(8, 1, 1); 
-      _pt90Min630 = bookProfile1D(8, 1, 2); 
-      _pt90Diff630 = bookProfile1D(8, 1, 3); 
+      _pt90Max630 = bookProfile1D(8, 1, 1);
+      _pt90Min630 = bookProfile1D(8, 1, 2);
+      _pt90Diff630 = bookProfile1D(8, 1, 3);
       _pTSum630_2Jet = bookProfile1D(9, 1, 1);
-      _pTSum630_3Jet = bookProfile1D(9, 1, 2);       
-      
+      _pTSum630_3Jet = bookProfile1D(9, 1, 2);
+   
       _pt90Dbn1800Et40 = bookHistogram1D(3, 1, 1);
       _pt90Dbn1800Et80 = bookHistogram1D(3, 1, 2);
       _pt90Dbn1800Et120 = bookHistogram1D(3, 1, 3);
       _pt90Dbn1800Et160 = bookHistogram1D(3, 1, 4);
       _pt90Dbn1800Et200 = bookHistogram1D(3, 1, 5);
       _ptDbn1800MB = bookHistogram1D(6, 1, 1);
-      
+   
       _numTracksDbn1800MB = bookHistogram1D(5, 1, 1);
       _numTracksDbn630MB = bookHistogram1D(10, 1, 1);
       _ptDbn630MB = bookHistogram1D(11, 1, 1);
     }
-    
-    
+ 
+ 
     /// Do the analysis
     void analyze(const Event& event) {
       // Trigger
@@ -147,7 +147,7 @@
       // Get sqrt(s) and event weight
       const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
       const double weight = event.weight();
-      
+   
       {
         getLog() << Log::DEBUG << "Running max/min analysis" << endl;
         vector<Jet> jets = applyProjection<JetAlg>(event, "Jets").jetsByE();
@@ -159,7 +159,7 @@
           const double ETlead = leadingjet.EtSum();
           getLog() << Log::DEBUG << "Leading Et = " << ETlead/GeV << " GeV" << endl;
           if (fabs(etaLead) > 0.5 && ETlead < 15*GeV) {
-            getLog() << Log::DEBUG << "Leading jet eta = " << etaLead 
+            getLog() << Log::DEBUG << "Leading jet eta = " << etaLead
                      << " not in |eta| < 0.5 & pT > 15 GeV" << endl;
           } else {
             // Multiplicity & pT distributions for sqrt(s) = 630 GeV, 1800 GeV
@@ -191,12 +191,12 @@
                 _pt90Dbn1800Et200->fill(ptTransTotal/GeV, weight);
               }
             }
-            
+         
           }
         }
       }
-      
-      
+   
+   
       // Fill min bias total track multiplicity histos
       {
         getLog() << Log::DEBUG << "Running min bias multiplicity analysis" << endl;
@@ -218,9 +218,9 @@
           }
         }
       }
-      
-      
-      
+   
+   
+   
       // Construct "Swiss Cheese" pT distributions, with pT contributions from
       // tracks within R = 0.7 of the 1st, 2nd (and 3rd) jets being ignored. A
       // different set of charged tracks, with |eta| < 1.0, is used here, and all
@@ -237,19 +237,19 @@
             fabs(cheesejets[0].momentum().pseudorapidity()) <= 0.5 &&
             cheesejets[0].momentum().Et()/GeV > 5.0 &&
             cheesejets[1].momentum().Et()/GeV > 5.0) {
-          
+       
           const double cheeseETlead = cheesejets[0].momentum().Et();
-          
+       
           const double eta1 = cheesejets[0].momentum().pseudorapidity();
           const double phi1 = cheesejets[0].momentum().azimuthalAngle();
           const double eta2 = cheesejets[1].momentum().pseudorapidity();
           const double phi2 = cheesejets[1].momentum().azimuthalAngle();
-          
+       
           double ptSumSub2(0), ptSumSub3(0);
           foreach (const Particle& t, cheesetracks) {
             FourMomentum trackMom = t.momentum();
             const double pt = trackMom.pT();
-            
+         
             // Subtracting 2 leading jets
             const double deltaR1 = deltaR(trackMom, eta1, phi1);
             const double deltaR2 = deltaR(trackMom, eta2, phi2);
@@ -261,9 +261,9 @@
                      << "|(" << eta2 << ", " << phi2 << ")| = " << deltaR2 << endl;
             if (deltaR1 > 0.7 && deltaR2 > 0.7) {
               ptSumSub2 += pt;
-              
+           
               // Subtracting 3rd leading jet
-              if (cheesejets.size() > 2 && 
+              if (cheesejets.size() > 2 &&
                   cheesejets[2].momentum().Et()/GeV > 5.0) {
                 const double eta3 = cheesejets[2].momentum().pseudorapidity();
                 const double phi3 = cheesejets[2].momentum().azimuthalAngle();
@@ -277,7 +277,7 @@
               }
             }
           }
-          
+       
           // Swiss Cheese sub 2,3 jets distributions for sqrt(s) = 630 GeV, 1800 GeV
           if (fuzzyEquals(sqrtS/GeV, 630)) {
             if (!isZero(ptSumSub2)) _pTSum630_2Jet->fill(cheeseETlead/GeV, ptSumSub2/GeV, weight);
@@ -286,21 +286,21 @@
             if (!isZero(ptSumSub2))_pTSum1800_2Jet->fill(cheeseETlead/GeV, ptSumSub2/GeV, weight);
             if (!isZero(ptSumSub3))_pTSum1800_3Jet->fill(cheeseETlead/GeV, ptSumSub3/GeV, weight);
           }
-          
+       
         }
-      }      
-      
+      }
+   
     }
-    
-    
-    void finalize() { 
+ 
+ 
+    void finalize() {
       // Normalize to actual number of entries in pT dbn histos
       normalize(_pt90Dbn1800Et40,  1656.75);
       normalize(_pt90Dbn1800Et80,  4657.5);
       normalize(_pt90Dbn1800Et120, 5395.5);
       normalize(_pt90Dbn1800Et160, 7248.75);
       normalize(_pt90Dbn1800Et200, 2442.0);
-      
+   
       // and for min bias distributions:
       normalize(_numTracksDbn1800MB, 309718.25);
       normalize(_numTracksDbn630MB, 1101024.0);
@@ -316,13 +316,13 @@
     /// @name Histogram collections
     //@{
     /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
-    /// the average \f$ p_T \f$ in the toward, transverse and away regions at 
+    /// the average \f$ p_T \f$ in the toward, transverse and away regions at
     /// \f$ \sqrt{s} = 1800 \text{GeV} \f$.
     /// Corresponds to Table 1, and HepData table 1.
     AIDA::IProfile1D *_pt90MaxAvg1800, *_pt90MinAvg1800;
 
     /// Profile histograms, binned in the \f$ E_T \f$ of the leading jet, for
-    /// the \f$ p_T \f$ sum in the toward, transverse and away regions at 
+    /// the \f$ p_T \f$ sum in the toward, transverse and away regions at
     /// \f$ \sqrt{s} = 1800 \text{GeV} \f$.
     /// Corresponds to figure 2/3, and HepData table 2.
     AIDA::IProfile1D *_pt90Max1800, *_pt90Min1800, *_pt90Diff1800;
@@ -348,23 +348,23 @@
     /// Corresponds to figure 9, and HepData table 9.
     AIDA::IProfile1D *_pTSum630_2Jet, *_pTSum630_3Jet;
 
-    /// Histogram of \f$ p_{T\text{sum}} \f$ distribution for 5 different 
+    /// Histogram of \f$ p_{T\text{sum}} \f$ distribution for 5 different
     /// \f$ E_{T1} \f$ bins.
     /// Corresponds to figure 4, and HepData table 3.
-    AIDA::IHistogram1D *_pt90Dbn1800Et40, *_pt90Dbn1800Et80, *_pt90Dbn1800Et120, 
+    AIDA::IHistogram1D *_pt90Dbn1800Et40, *_pt90Dbn1800Et80, *_pt90Dbn1800Et120,
       *_pt90Dbn1800Et160, *_pt90Dbn1800Et200;
 
-    /// Histograms of track multiplicity and \f$ p_T \f$ distributions for 
+    /// Histograms of track multiplicity and \f$ p_T \f$ distributions for
     /// minimum bias events.
     /// Figure 6, and HepData tables 5 & 6.
     /// Figure 10, and HepData tables 10 & 11.
     AIDA::IHistogram1D *_numTracksDbn1800MB, *_ptDbn1800MB;
     AIDA::IHistogram1D *_numTracksDbn630MB, *_ptDbn630MB;
     //@}
-    
+ 
   };
-    
-    
+ 
+ 
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2004_S5839831> plugin_CDF_2004_S5839831;

Modified: trunk/src/Analyses/CDF_2005_S6080774.cc
==============================================================================
--- trunk/src/Analyses/CDF_2005_S6080774.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2005_S6080774.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -24,7 +24,7 @@
     void init() {
       FinalState fs;
       addProjection(fs, "FS");
-      
+   
       IdentifiedFinalState ifs(-0.9, 0.9, 13.0*GeV);
       ifs.acceptId(PHOTON);
       addProjection(ifs, "IFS");
@@ -39,13 +39,13 @@
 
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       ParticleVector photons = applyProjection<IdentifiedFinalState>(event, "IFS").particlesByPt();
       if (photons.size() < 2 ||
           (photons[0].momentum().pT() < 14.0*GeV)) {
         vetoEvent;
       }
-      
+   
       // Isolate photons with ET_sum in cone
       ParticleVector isolated_photons;
       ParticleVector fs = applyProjection<FinalState>(event, "FS").particles();
@@ -62,27 +62,27 @@
           isolated_photons.push_back(photon);
         }
       }
-      
+   
       if (isolated_photons.size() != 2) {
         vetoEvent;
       }
-      
+   
       FourMomentum mom_PP = isolated_photons[0].momentum() + isolated_photons[1].momentum();
       for (size_t i=0; i<4; ++i) {
         _h_m_PP[i]->fill(mom_PP.mass(), weight);
         _h_pT_PP[i]->fill(mom_PP.pT(), weight);
         _h_dphi_PP[i]->fill(mapAngle0ToPi(isolated_photons[0].momentum().phi()-
                                           isolated_photons[1].momentum().phi())/M_PI, weight);
-      }      
+      }
     }
-    
-    
+ 
+ 
     void finalize() {
       for (size_t i=0; i<4; ++i) {
         scale(_h_m_PP[i], crossSection()/sumOfWeights());
         scale(_h_pT_PP[i], crossSection()/sumOfWeights());
         scale(_h_dphi_PP[i], crossSection()/sumOfWeights());
-      }  
+      }
     }
 
     //@}
@@ -96,8 +96,8 @@
     std::vector<AIDA::IHistogram1D*> _h_pT_PP;
     std::vector<AIDA::IHistogram1D*> _h_dphi_PP;
     //@}
-    
-    
+ 
+ 
   };
 
 

Modified: trunk/src/Analyses/CDF_2005_S6217184.cc
==============================================================================
--- trunk/src/Analyses/CDF_2005_S6217184.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2005_S6217184.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,65 +9,65 @@
 
 namespace Rivet {
 
-  
+
   /* CDF Run II jet shape analysis
    * @author Lars Sonnenschein
    * @author Andy Buckley
    */	
-  class CDF_2005_S6217184 : public Analysis {    
+  class CDF_2005_S6217184 : public Analysis {
   public:
-    
+ 
     /// Constructor
     CDF_2005_S6217184()
       : Analysis("CDF_2005_S6217184")
     {
       setBeams(PROTON, ANTIPROTON);
     }
-        
+     
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       // Set up projections
       const FinalState fs(-2.0, 2.0);
       addProjection(fs, "FS");
-      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.7), "Jets"); 
+      addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.7), "Jets");
       addProjection(TotalVisibleMomentum(fs), "CalMET");
       addProjection(PVertex(), "PV");
-      
+   
       // Veto (anti)neutrinos, and muons with pT above 1.0 GeV
       VetoedFinalState vfs(fs);
       vfs.vetoNeutrinos();
       vfs.addVetoPairDetail(MUON, 1.0*GeV, MAXDOUBLE);
       addProjection(vfs, "VFS");
       addProjection(JetShape(vfs, _jetaxes, 0.0, 0.7, 0.1, 0.3), "JetShape");
-      
+   
       // Specify pT bins
-      _pTbins += 37.0, 45.0, 55.0, 63.0, 73.0, 84.0, 97.0, 112.0, 128.0, 
+      _pTbins += 37.0, 45.0, 55.0, 63.0, 73.0, 84.0, 97.0, 112.0, 128.0,
         148.0, 166.0, 186.0, 208.0, 229.0, 250.0, 277.0, 304.0, 340.0, 380.0;
 
       /// Book histograms
       // 18 = 6x3 pT bins, one histogram each
-      for (size_t i = 0; i < 6; ++i) { 
+      for (size_t i = 0; i < 6; ++i) {
         for (size_t j = 0; j < 3; ++j) {
           size_t k = i*3 + j;
           _profhistRho_pT[k] = bookProfile1D(i+1, 1, j+1);
           _profhistPsi_pT[k] = bookProfile1D(6+i+1, 1, j+1);
         }
-      }    
-      
+      }
+   
       _profhistPsi = bookProfile1D(13, 1, 1);
     }
-    
-    
-    
+ 
+ 
+ 
     /// Do the analysis
     void analyze(const Event& event) {
-      
+   
       // Get jets and require at least one to pass pT and y cuts
       const Jets jets = applyProjection<FastJets>(event, "Jets").jetsByPt();
       getLog() << Log::DEBUG << "Jet multiplicity before cuts = " << jets.size() << endl;
-      
+   
       // Determine the central jet axes
       _jetaxes.clear();
       foreach (const Jet& jt, jets) {
@@ -77,11 +77,11 @@
         }
       }
       if (_jetaxes.empty()) vetoEvent;
-      
+   
       // Calculate and histogram jet shapes
       const double weight = event.weight();
       const JetShape& js = applyProjection<JetShape>(event, "JetShape");
-      
+   
       /// @todo Use BinnedHistogram, for collections of histos each for a range of values of an extra variable
       for (size_t jind = 0; jind < _jetaxes.size(); ++jind) {
         for (size_t ipT = 0; ipT < 18; ++ipT) {
@@ -98,15 +98,15 @@
           }
         }
       }
-      
+   
     }
-    
-    
+ 
+ 
     // Finalize
-    void finalize() {  
+    void finalize() {
       //
     }
-    
+ 
     //@}
 
 
@@ -131,7 +131,7 @@
     //@}
 
   };
-    
+ 
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2005_S6217184> plugin_CDF_2005_S6217184;

Modified: trunk/src/Analyses/CDF_2006_S6450792.cc
==============================================================================
--- trunk/src/Analyses/CDF_2006_S6450792.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2006_S6450792.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,11 +15,11 @@
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       FinalState fs;
       addProjection(FastJets(fs, FastJets::CDFMIDPOINT, 0.7, 61.0*GeV), "ConeFinder");
@@ -37,8 +37,8 @@
         }
       }
     }
-    
-    
+ 
+ 
     void finalize() {
       const double delta_y = 1.2;
       scale(_h_jet_pt, crossSection()/nanobarn/sumOfWeights()/delta_y);

Modified: trunk/src/Analyses/CDF_2006_S6653332.cc
==============================================================================
--- trunk/src/Analyses/CDF_2006_S6653332.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2006_S6653332.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,7 +13,7 @@
 namespace Rivet {
 
 
-  /* @brief CDF Run II analysis: jet \f$ p_T \f$ and \f$ \eta \f$ 
+  /* @brief CDF Run II analysis: jet \f$ p_T \f$ and \f$ \eta \f$
    *   distributions in Z + (b) jet production
    * @author Lars Sonnenschein
    *
@@ -24,11 +24,11 @@
   public:
 
     /// Constructor
-    CDF_2006_S6653332()  
+    CDF_2006_S6653332()
       : Analysis("CDF_2006_S6653332"),
         _Rjet(0.7), _JetPtCut(20.), _JetEtaCut(1.5),
         _sumWeightsWithZ(0.0), _sumWeightsWithZJet(0.0)
-    { 
+    {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
@@ -40,8 +40,8 @@
     void init() {
       const FinalState fs(-3.6, 3.6);
       addProjection(fs, "FS");
-      
-      // Create a final state with any e+e- or mu+mu- pair with 
+   
+      // Create a final state with any e+e- or mu+mu- pair with
       // invariant mass 76 -> 106 GeV and ET > 20 (Z decay products)
       vector<pair<long,long> > vids;
       vids.push_back(make_pair(ELECTRON, POSITRON));
@@ -49,7 +49,7 @@
       FinalState fs2(-3.6, 3.6);
       InvMassFinalState invfs(fs2, vids, 76*GeV, 106*GeV);
       addProjection(invfs, "INVFS");
-      
+   
       // Make a final state without the Z decay products for jet clustering
       VetoedFinalState vfs(fs);
       vfs.addVetoOnThisFinalState(invfs);
@@ -59,52 +59,52 @@
       // Book histograms
       _sigmaBJet = bookHistogram1D(1, 1, 1);
       _ratioBJetToZ = bookHistogram1D(2, 1, 1);
-      _ratioBJetToJet = bookHistogram1D(3, 1, 1);  
+      _ratioBJetToJet = bookHistogram1D(3, 1, 1);
     }
 
-  
+
     /// Do the analysis
     void analyze(const Event& event) {
       // Check we have an l+l- pair that passes the kinematic cuts
       // Get the Z decay products (mu+mu- or e+e- pair)
       const InvMassFinalState& invMassFinalState = applyProjection<InvMassFinalState>(event, "INVFS");
       const ParticleVector&  ZDecayProducts =  invMassFinalState.particles();
-      
-      // Make sure we have at least 2 Z decay products (mumu or ee) 
-      if (ZDecayProducts.size() < 2) vetoEvent;      
-      _sumWeightsWithZ += event.weight();      
+   
+      // Make sure we have at least 2 Z decay products (mumu or ee)
+      if (ZDecayProducts.size() < 2) vetoEvent;
+      _sumWeightsWithZ += event.weight();
       // @todo: write out a warning if there are more than two decay products
       FourMomentum Zmom = ZDecayProducts[0].momentum() +  ZDecayProducts[1].momentum();
-      
+   
       // Put all b-quarks in a vector
       /// @todo Use jet contents rather than accessing quarks directly
       ParticleVector bquarks;
       /// @todo Use nicer looping
-      for (GenEvent::particle_const_iterator p = event.genEvent().particles_begin(); 
+      for (GenEvent::particle_const_iterator p = event.genEvent().particles_begin();
            p != event.genEvent().particles_end(); ++p) {
         if ( fabs((*p)->pdg_id()) == BQUARK ) {
           bquarks.push_back(Particle(**p));
         }
       }
-      
-      // Get jets 
+   
+      // Get jets
       const FastJets& jetpro = applyProjection<FastJets>(event, "Jets");
       getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jetpro.size() << endl;
-      
+   
       const PseudoJets& jets = jetpro.pseudoJetsByPt();
       getLog() << Log::DEBUG << "jetlist size = " << jets.size() << endl;
-      
+   
       int numBJet = 0;
       int numJet  = 0;
       // for each b-jet plot the ET and the eta of the jet, normalise to the total cross section at the end
-      // for each event plot N jet and pT(Z), normalise to the total cross section at the end 
+      // for each event plot N jet and pT(Z), normalise to the total cross section at the end
       for (PseudoJets::const_iterator jt = jets.begin(); jt != jets.end(); ++jt) {
         // select jets that pass the kinematic cuts
         if (jt->perp() > _JetPtCut && fabs(jt->rapidity()) <= _JetEtaCut) {
           ++numJet;
           // Does the jet contain a b-quark?
           /// @todo Use jet contents rather than accessing quarks directly
-          
+       
           bool bjet = false;
           foreach (const Particle& bquark,  bquarks) {
             if (deltaR(jt->rapidity(), jt->phi(), bquark.momentum().rapidity(),bquark.momentum().azimuthalAngle()) <= _Rjet) {
@@ -117,28 +117,28 @@
           }
         }
       } // end loop around jets
-      
+   
       if (numJet > 0)    _sumWeightsWithZJet += event.weight();
       if (numBJet > 0) {
         _sigmaBJet->fill(1960.0,event.weight());
         _ratioBJetToZ->fill(1960.0,event.weight());
         _ratioBJetToJet->fill(1960.0,event.weight());
       }
-      
+   
     }
-    
-  
+ 
+
     /// Finalize
-    void finalize() { 
+    void finalize() {
       getLog() << Log::DEBUG << "Total sum of weights = " << sumOfWeights() << endl;
       getLog() << Log::DEBUG << "Sum of weights for Z production in mass range = " << _sumWeightsWithZ << endl;
       getLog() << Log::DEBUG << "Sum of weights for Z+jet production in mass range = " << _sumWeightsWithZJet << endl;
-      
+   
       _sigmaBJet->scale(crossSection()/sumOfWeights());
       _ratioBJetToZ->scale(1.0/_sumWeightsWithZ);
       _ratioBJetToJet->scale(1.0/_sumWeightsWithZJet);
     }
-    
+ 
         //@}
 
 
@@ -162,10 +162,10 @@
     AIDA::IHistogram1D* _ratioBJetToZ;
     AIDA::IHistogram1D* _ratioBJetToJet;
     //@}
-    
+ 
   };
-  
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2006_S6653332> plugin_CDF_2006_S6653332;
 

Modified: trunk/src/Analyses/CDF_2007_S7057202.cc
==============================================================================
--- trunk/src/Analyses/CDF_2007_S7057202.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2007_S7057202.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,7 +6,7 @@
 #include "Rivet/Projections/FastJets.hh"
 
 namespace Rivet {
-  
+
 
   /// @brief CDF Run II inclusive jet cross-section using the kT algorithm.
   /// @author James Monk
@@ -23,7 +23,7 @@
       setNeedsCrossSection(true);
     }
 
-    
+ 
     /// @name Analysis methods
     //@{
 
@@ -43,12 +43,12 @@
       _binnedHistosD07.addHistogram(0.7, 1.1, bookHistogram1D(3, 1, 1));
       _binnedHistosD07.addHistogram(1.1, 1.6, bookHistogram1D(4, 1, 1));
       _binnedHistosD07.addHistogram(1.6, 2.1, bookHistogram1D(5, 1, 1));
-      
+   
       size_t yind = 0;
       for (vector<AIDA::IHistogram1D*>::const_iterator histIt = _binnedHistosD07.getHistograms().begin();
            histIt != _binnedHistosD07.getHistograms().end(); ++histIt){
         _eventsPassed[*histIt] = 0.0;
-        _yBinWidths[*histIt] = 2.0 * (_ybins[yind+1]-_ybins[yind]); 
+        _yBinWidths[*histIt] = 2.0 * (_ybins[yind+1]-_ybins[yind]);
         ++yind;
       }
       _eventsPassed[_histoD05] = 0.0;
@@ -56,12 +56,12 @@
       _eventsPassed[_histoD10] = 0.0;
       _yBinWidths[_histoD10] = 2.0*(-_ybins[1]+_ybins[2]);
     }
-    
-    
+ 
+ 
     /// Do the analysis
     void analyze(const Event& event) {
-      const double weight = event.weight();    
-      
+      const double weight = event.weight();
+   
       const PseudoJets jetListD07 = applyProjection<FastJets>(event, "JetsD07").pseudoJets();
       set< IHistogram1D*> passed;
       /// @todo Use Jet interface rather than FastJet:PseudoJet
@@ -77,8 +77,8 @@
           }
         }
       }
-      
-      /// @todo Use Jet interface rather than FastJet:PseudoJet    
+   
+      /// @todo Use Jet interface rather than FastJet:PseudoJet
       const PseudoJets jetListD05 = applyProjection<FastJets>(event, "JetsD05").pseudoJets();
       for (PseudoJets::const_iterator jet = jetListD05.begin(); jet != jetListD05.end(); ++jet) {
         const double pt = jet->perp();
@@ -93,7 +93,7 @@
           }
         }
       }
-      
+   
       /// @todo Use Jet interface rather than FastJet:PseudoJet
       const PseudoJets jetListD10 = applyProjection<FastJets>(event, "JetsD10").pseudoJets();
       for (PseudoJets::const_iterator jet = jetListD10.begin(); jet != jetListD10.end(); ++jet){
@@ -109,14 +109,14 @@
           }
         }
       }
-    }  
-    
-    
+    }
+ 
+ 
     // Normalise histograms to cross-section
     void finalize() {
       const double xSecPerEvent = crossSection()/nanobarn / sumOfWeights();
       getLog() << Log::INFO << "Cross-section = " << crossSection()/nanobarn << " nb" << endl;
-      
+   
       for (map<IHistogram1D*,double>::iterator histIt = _eventsPassed.begin(),
              histJt = _yBinWidths.begin(); histIt != _eventsPassed.end(); ++histIt, ++histJt) {
         IHistogram1D* hist = histIt->first;
@@ -124,18 +124,18 @@
         normalize(hist, xSec);
       }
     }
-    
+ 
         //@}
-    
+ 
   private:
 
     /// Rapidity range of histograms for R=0.05 and R=1 kt jets
     const double _minY, _maxY;
-        
+     
     /// Min jet \f$ p_T \f$ cut.
     /// @todo Make static const and UPPERCASE?
     const double _jetMinPT;
-    
+ 
     /// Counter for the number of events analysed (actually the sum of weights, hence double).
     double _eventsTried;
 
@@ -165,7 +165,7 @@
 
   // Initialise static
   const double CDF_2007_S7057202::_ybins[] = { 0.0, 0.1, 0.7, 1.1, 1.6, 2.1 };
-  
+
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2007_S7057202> plugin_CDF_2007_S7057202;

Modified: trunk/src/Analyses/CDF_2008_NOTE_9351.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_NOTE_9351.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_NOTE_9351.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,28 +11,28 @@
 
   /* @brief CDF Run II underlying event in Drell-Yan
    * @author Hendrik Hoeth
-   * 
+   *
    * Measurement of the underlying event in Drell-Yan Z/gamma->e+e-
    * and Z/gamma->mu+mu- events. The reconstructed Z defines the
    * phi orientation. A Z mass window cut is applied.
-   * 
-   * 
+   *
+   *
    * @par Run conditions
-   * 
+   *
    * @arg \f$ \sqrt{s} = \f$ 1960 GeV
    * @arg produce Drell-Yan events
    * @arg Set particles with c*tau > 10 mm stable
    * @arg Z decay mode: Z -> e+e- and Z -> mu+mu-
    * @arg gamma decay mode: gamma -> e+e- and gamma -> mu+mu-
    * @arg minimum invariant mass of the fermion pair coming from the Z/gamma: 70 GeV
-   * 
-   */ 
+   *
+   */
   class CDF_2008_NOTE_9351 : public Analysis {
   public:
 
     /// Constructor
     CDF_2008_NOTE_9351() : Analysis("CDF_2008_NOTE_9351")
-    { 
+    {
       setBeams(PROTON, ANTIPROTON);
     }
 
@@ -46,7 +46,7 @@
       const ChargedFinalState clfs(-1.0, 1.0, 20*GeV);
       addProjection(cfs, "FS");
       addProjection(ChargedLeptons(clfs), "CL");
-      
+   
       // Book histograms
       _hist_tnchg      = bookProfile1D( 1, 1, 1);
       _hist_pnchg      = bookProfile1D( 2, 1, 1);
@@ -54,50 +54,50 @@
       _hist_pminnchg   = bookProfile1D( 4, 1, 1);
       _hist_pdifnchg   = bookProfile1D( 5, 1, 1);
       _hist_anchg      = bookProfile1D( 6, 1, 1);
-      
+   
       _hist_tcptsum    = bookProfile1D( 7, 1, 1);
       _hist_pcptsum    = bookProfile1D( 8, 1, 1);
       _hist_pmaxcptsum = bookProfile1D( 9, 1, 1);
       _hist_pmincptsum = bookProfile1D(10, 1, 1);
       _hist_pdifcptsum = bookProfile1D(11, 1, 1);
       _hist_acptsum    = bookProfile1D(12, 1, 1);
-      
+   
       _hist_tcptave    = bookProfile1D(13, 1, 1);
       _hist_pcptave    = bookProfile1D(14, 1, 1);
       _hist_acptave    = bookProfile1D(15, 1, 1);
-      
+   
       _hist_tcptmax    = bookProfile1D(16, 1, 1);
       _hist_pcptmax    = bookProfile1D(17, 1, 1);
       _hist_acptmax    = bookProfile1D(18, 1, 1);
-      
+   
       _hist_zptvsnchg  = bookProfile1D(19, 1, 1);
       _hist_cptavevsnchg = bookProfile1D(20, 1, 1);
       _hist_cptavevsnchgsmallzpt = bookProfile1D(21, 1, 1);
     }
-    
-    
+ 
+ 
     /// Do the analysis
     void analyze(const Event& e) {
-      
+   
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 1) {
         getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
         vetoEvent;
       }
-      
+   
       // Get the event weight
       const double weight = e.weight();
-      
+   
       // Get the leptons
       const ParticleVector& leptons = applyProjection<ChargedLeptons>(e, "CL").chargedLeptons();
-      
+   
       // We want exactly two leptons of the same flavour.
       getLog() << Log::DEBUG << "lepton multiplicity = " << leptons.size() << endl;
       if (leptons.size() != 2 || leptons[0].pdgId() != -leptons[1].pdgId() ) vetoEvent;
-      
+   
       // Lepton pT > 20 GeV
       if (leptons[0].momentum().pT()/GeV <= 20 || leptons[1].momentum().pT()/GeV <= 20) vetoEvent;
 
@@ -105,8 +105,8 @@
       const FourMomentum dilepton = leptons[0].momentum() + leptons[1].momentum();
       if (!inRange(dilepton.mass()/GeV, 70, 110) || fabs(dilepton.eta()) >= 6) vetoEvent;
       getLog() << Log::DEBUG << "Dilepton mass = " << mass(dilepton)/GeV << " GeV" << endl;
-      getLog() << Log::DEBUG << "Dilepton pT   = " << pT(dilepton)/GeV << " GeV" << endl; 
-      
+      getLog() << Log::DEBUG << "Dilepton pT   = " << pT(dilepton)/GeV << " GeV" << endl;
+   
       // Calculate the observables
       size_t   numToward(0),     numTrans1(0),     numTrans2(0),     numAway(0);
       double ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
@@ -118,12 +118,12 @@
         // Don't use the leptons
         /// @todo Replace with PID::isLepton
         if (abs(p->pdgId()) < 20) continue;
-        
+     
         const double dPhi = deltaPhi(p->momentum().phi(), phiZ);
         const double pT = p->momentum().pT();
         double rotatedphi = p->momentum().phi() - phiZ;
         while (rotatedphi < 0) rotatedphi += 2*PI;
-        
+     
         if (dPhi < PI/3.0) {
           ptSumToward += pT;
           ++numToward;
@@ -153,7 +153,7 @@
         if (pTZ < 10)
           _hist_cptavevsnchgsmallzpt->fill(numParticles-2, pT, weight);
       }
-      
+   
       // Fill the histograms
       _hist_tnchg->fill(pTZ, numToward/(4*PI/3), weight);
       _hist_pnchg->fill(pTZ, (numTrans1+numTrans2)/(4*PI/3), weight);
@@ -161,14 +161,14 @@
       _hist_pminnchg->fill(pTZ, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
       _hist_pdifnchg->fill(pTZ, abs(numTrans1-numTrans2)/(2*PI/3), weight);
       _hist_anchg->fill(pTZ, numAway/(4*PI/3), weight);
-      
+   
       _hist_tcptsum->fill(pTZ, ptSumToward/(4*PI/3), weight);
       _hist_pcptsum->fill(pTZ, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
       _hist_pmaxcptsum->fill(pTZ, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
       _hist_pmincptsum->fill(pTZ, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
       _hist_pdifcptsum->fill(pTZ, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
       _hist_acptsum->fill(pTZ, ptSumAway/(4*PI/3), weight);
-      
+   
       if (numToward > 0) {
         _hist_tcptave->fill(pTZ, ptSumToward/numToward, weight);
         _hist_tcptmax->fill(pTZ, ptMaxToward, weight);
@@ -181,16 +181,16 @@
         _hist_acptave->fill(pTZ, ptSumAway/numAway, weight);
         _hist_acptmax->fill(pTZ, ptMaxAway, weight);
       }
-      
+   
       // We need to subtract the two leptons from the number of particles to get the correct multiplicity
       _hist_zptvsnchg->fill(numParticles-2, pTZ, weight);
     }
-    
-    
-    void finalize() { 
+ 
+ 
+    void finalize() {
       //
     }
-    
+ 
     //@}
 
   private:
@@ -219,7 +219,7 @@
 
   };
 
-    
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2008_NOTE_9351> plugin_CDF_2008_NOTE_9351;
 

Modified: trunk/src/Analyses/CDF_2008_S7540469.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_S7540469.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_S7540469.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -22,18 +22,18 @@
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
-    } 
+    }
 
-    
+ 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     /// Book histograms
     void init() {
       // Full final state
       FinalState fs(-5.0, 5.0);
       addProjection(fs, "FS");
-      
+   
       // Leading electrons in tracking acceptance
       IdentifiedFinalState elfs(-5.0, 5.0, 25.0*GeV);
       elfs.acceptIdPair(ELECTRON);
@@ -43,12 +43,12 @@
       _h_jet_pT_cross_section_incl_1jet = bookHistogram1D(2, 1, 1);
       _h_jet_pT_cross_section_incl_2jet = bookHistogram1D(3, 1, 1);
     }
-    
-    
-    /// Do the analysis 
+ 
+ 
+    /// Do the analysis
     void analyze(const Event & event) {
       const double weight = event.weight();
-      
+   
       // Skip if the event is empty
       const FinalState& fs = applyProjection<FinalState>(event, "FS");
       if (fs.empty()) {
@@ -56,7 +56,7 @@
                  << " because no final state pair found " << endl;
         vetoEvent;
       }
-      
+   
       // Find the Z candidates
       const FinalState & electronfs = applyProjection<FinalState>(event, "LeadingElectrons");
       std::vector<std::pair<Particle, Particle> > Z_candidates;
@@ -91,7 +91,7 @@
                  << " because no unique electron pair found " << endl;
         vetoEvent;
       }
-      
+   
       // Now build the jets on a FS without the electrons from the Z
       // (including their QED radiation)
       ParticleVector jetparts;
@@ -122,7 +122,7 @@
       /// @todo Allow proj creation w/o FS as ctor arg, so that calc can be used more easily.
       FastJets jetpro(fs, FastJets::CDFMIDPOINT, 0.7);
       jetpro.calc(jetparts);
-      
+   
       // Take jets with pt > 30, |eta| < 2.1:
       /// @todo Make this neater, using the JetAlg interface and the built-in sorting
       const Jets& jets = jetpro.jets();
@@ -133,16 +133,16 @@
         }
       }
       getLog() << Log::DEBUG << "Num jets above 30 GeV = " << jets_cut.size() << endl;
-      
+   
       // Return if there are no jets:
       if (jets_cut.empty()) {
         getLog() << Log::DEBUG << "No jets pass cuts " << endl;
         vetoEvent;
       }
-      
+   
       // Sort by pT:
       sort(jets_cut.begin(), jets_cut.end(), cmpJetsByPt);
-      
+   
       // cut on Delta R between jet and electrons
       foreach (const Jet& j, jets_cut) {
         Particle el = Z_candidates[0].first;
@@ -156,7 +156,7 @@
           vetoEvent;
         }
       }
-      
+   
       for (size_t njet=1; njet<=jets_cut.size(); ++njet) {
         _h_jet_multiplicity->fill(njet, weight);
       }
@@ -169,8 +169,8 @@
         }
       }
     }
-    
-    
+ 
+ 
     /// Rescale histos
     void finalize() {
       const double invlumi = crossSection()/femtobarn/sumOfWeights();
@@ -182,17 +182,17 @@
     //@}
 
   private:
-    
+ 
     /// @name Histograms
     //@{
     AIDA::IHistogram1D * _h_jet_multiplicity;
     AIDA::IHistogram1D * _h_jet_pT_cross_section_incl_1jet;
     AIDA::IHistogram1D * _h_jet_pT_cross_section_incl_2jet;
     //@}
-    
+ 
   };
-  
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2008_S7540469> plugin_CDF_2008_S7540469;
 

Modified: trunk/src/Analyses/CDF_2008_S7541902.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_S7541902.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_S7541902.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -29,7 +29,7 @@
 
     /// Constructor
     CDF_2008_S7541902()
-      : Analysis("CDF_2008_S7541902"),    
+      : Analysis("CDF_2008_S7541902"),
         _electronETCut(20.0*GeV), _electronETACut(1.1),
         _eTmissCut(30.0*GeV), _mTCut(20.0*GeV),
         _jetEtCutA(20.0*GeV),  _jetEtCutB(25.0*GeV), _jetETA(2.0),
@@ -42,13 +42,13 @@
 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       // Set up projections
       // Basic FS
       FinalState fs(-3.6, 3.6);
       addProjection(fs, "FS");
-      
+   
       // Create a final state with any e-nu pair with invariant mass 65 -> 95 GeV and ET > 20 (W decay products)
       vector<pair<long,long> > vids;
       vids += make_pair(ELECTRON, NU_EBAR);
@@ -56,7 +56,7 @@
       FinalState fs2(-3.6, 3.6, 20*GeV);
       InvMassFinalState invfs(fs2, vids, 65*GeV, 95*GeV);
       addProjection(invfs, "INVFS");
-      
+   
       // Make a final state without the W decay products for jet clustering
       VetoedFinalState vfs(fs);
       vfs.addVetoOnThisFinalState(invfs);
@@ -69,17 +69,17 @@
         _histJetEt[i] = bookHistogram1D(i+1, 1, 1);
         _histJetMultRatio[i] = bookDataPointSet(5 , 1, i+1);
         _histJetMult[i]   = bookHistogram1D(i+6, 1, 1);
-      } 
+      }
       _histJetMultNorm = bookHistogram1D("norm", 1, _xpoint, _xpoint+1.);
     }
-    
+ 
 
     /// Do the analysis
     void analyze(const Event& event) {
       // Get the W decay products (electron and neutrino)
       const InvMassFinalState& invMassFinalState = applyProjection<InvMassFinalState>(event, "INVFS");
       const ParticleVector&  wDecayProducts = invMassFinalState.particles();
-      
+   
       FourMomentum electronP, neutrinoP;
       bool gotElectron(false), gotNeutrino(false);
       foreach (const Particle& p, wDecayProducts) {
@@ -93,14 +93,14 @@
           gotNeutrino = true;
         }
       }
-      
+   
       // Veto event if the electron or MET cuts fail
       if (!gotElectron || !gotNeutrino) vetoEvent;
-      
+   
       // Veto event if the MTR cut fails
       double mT2 = 2.0 * ( electronP.pT()*neutrinoP.pT() - electronP.px()*neutrinoP.px() - electronP.py()*neutrinoP.py() );
       if (sqrt(mT2) < _mTCut ) vetoEvent;
-      
+   
       // Get the jets
       const JetAlg& jetProj = applyProjection<FastJets>(event, "Jets");
       Jets theJets = jetProj.jetsByEt(_jetEtCutA);
@@ -117,7 +117,7 @@
           if (pj.Et() > _jetEtCutB) ++njetsB;
         }
       }
-      
+   
       // Jet multiplicity
       _histJetMultNorm->fill(_xpoint, event.weight());
       for (size_t i = 1; i <= njetsB; ++i) {
@@ -125,14 +125,14 @@
         if (i == 4) break;
       }
     }
-    
-    
+ 
+ 
 
     /// Finalize
-    void finalize() { 
+    void finalize() {
       const double xsec = crossSection()/sumOfWeights();
       // Get the x-axis for the ratio plots
-      /// @todo Replace with autobooking etc. once YODA in place    
+      /// @todo Replace with autobooking etc. once YODA in place
       std::vector<double> xval; xval.push_back(_xpoint);
       std::vector<double> xerr; xerr.push_back(.5);
       // Fill the first ratio histogram using the special normalisation histogram for the total cross section
@@ -146,8 +146,8 @@
         frac_err1to0 += pow(_histJetMultNorm->binError(0)/_histJetMultNorm->binHeight(0),2.);
         frac_err1to0 = sqrt(frac_err1to0);
       }
-      
-      /// @todo Replace with autobooking etc. once YODA in place    
+   
+      /// @todo Replace with autobooking etc. once YODA in place
       vector<double> yval[4]; yval[0].push_back(ratio1to0);
       vector<double> yerr[4]; yerr[0].push_back(ratio1to0*frac_err1to0);
       _histJetMultRatio[0]->setCoordinate(0,xval,xerr);
@@ -179,12 +179,12 @@
 
   private:
 
-    /// @name Cuts 
+    /// @name Cuts
     //@{
     /// Cut on the electron ET:
     double _electronETCut;
     /// Cut on the electron ETA:
-    double _electronETACut;   
+    double _electronETACut;
     /// Cut on the missing ET
     double _eTmissCut;
     /// Cut on the transverse mass squared
@@ -195,7 +195,7 @@
     double _jetEtCutB;
     /// Cut on the jet ETA
     double _jetETA;
-    //@}    
+    //@}
 
     double _xpoint;
 

Modified: trunk/src/Analyses/CDF_2008_S7782535.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_S7782535.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_S7782535.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -22,7 +22,7 @@
     {
       setBeams(PROTON, ANTIPROTON);
     }
-    
+ 
 
     /// @name Analysis methods
     //@{
@@ -45,15 +45,15 @@
         _h_Psi_pT[i] = bookProfile1D(i+1, 2, 1);
       }
       _h_OneMinusPsi_vs_pT = bookDataPointSet(5, 1, 1);
-    }  
-    
-    
+    }
+ 
+ 
     // Do the analysis
     void analyze(const Event& event) {
-      // Get jets     
+      // Get jets
       const Jets& jets = applyProjection<FastJets>(event, "Jets").jetsByPt();
       getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jets.size() << endl;
-      
+   
       // Determine the central jet axes
       _jetaxes.clear();
       foreach (const Jet& j, jets) {
@@ -69,10 +69,10 @@
         getLog() << Log::DEBUG << "No b-jet axes in acceptance" << endl;
         vetoEvent;
       }
-      
+   
       // Determine jet shapes
       const JetShape& js = applyProjection<JetShape>(event, "JetShape");
-      
+   
       /// @todo Replace with foreach
       for (size_t jind = 0; jind < _jetaxes.size(); ++jind) {
         // Put jet in correct pT bin
@@ -92,12 +92,12 @@
           }
         }
       }
-      
+   
     }
-    
-    
+ 
+ 
     /// Finalize
-    void finalize() {  
+    void finalize() {
       vector<double> y, ey;
       for (size_t i = 0; i < _pTbins.size()-1; ++i) {
         // Get entry for rad_Psi = 0.2 bin
@@ -105,9 +105,9 @@
         y.push_back(1.0 - ph_i->binHeight(1));
         ey.push_back(ph_i->binError(1));
       }
-      _h_OneMinusPsi_vs_pT->setCoordinate(1, y, ey); 
+      _h_OneMinusPsi_vs_pT->setCoordinate(1, y, ey);
     }
-    
+ 
     //@}
 
 
@@ -130,7 +130,7 @@
 
   };
 
-  
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2008_S7782535> plugin_CDF_2008_S7782535;
 

Modified: trunk/src/Analyses/CDF_2008_S7828950.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_S7828950.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_S7828950.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,12 +11,12 @@
 
 
   /// CDF Run II inclusive jet cross-section using the Midpoint algorithm.
-  /// The analysis includes 1.1fb^-1 of CDF data and is the first with a 
+  /// The analysis includes 1.1fb^-1 of CDF data and is the first with a
   /// cone algorithm to include the forward region of the detector.
   /// arXiv:0807.2204 to be published in PRD
   class CDF_2008_S7828950 : public Analysis {
   public:
-    
+ 
     /// Constructor
     CDF_2008_S7828950() : Analysis("CDF_2008_S7828950")
     {
@@ -50,11 +50,11 @@
 
     // Do the analysis
     void analyze(const Event& event) {
-      const double weight = event.weight();    
+      const double weight = event.weight();
       foreach (const Jet& jet, applyProjection<FastJets>(event, "JetsM07").jets()) {
         _binnedHistosR07.fill(fabs(jet.momentum().rapidity()), jet.momentum().pT(), weight);
       }
-    }  
+    }
 
 
     // Normalise histograms to cross-section
@@ -68,7 +68,7 @@
 
 
   private:
-    
+ 
     /// @name Histograms
     //@{
 
@@ -88,7 +88,7 @@
   // Initialise static
   const double CDF_2008_S7828950::_ybins[] = { 0.0, 0.1, 0.7, 1.1, 1.6, 2.1 };
 
-  
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2008_S7828950> plugin_CDF_2008_S7828950;
 

Modified: trunk/src/Analyses/CDF_2008_S8093652.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_S8093652.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_S8093652.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,12 +18,12 @@
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
-    } 
+    }
 
 
     /// @name Analysis methods
-    //@{ 
-    
+    //@{
+ 
     /// Book histograms
     void init() {
       FinalState fs;
@@ -32,28 +32,28 @@
 
       _h_m_dijet = bookHistogram1D(1, 1, 1);
     }
-    
+ 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event & e) {
       const double weight = e.weight();
-      
+   
       const JetAlg& jetpro = applyProjection<JetAlg>(e, "ConeFinder");
       const Jets& jets = jetpro.jetsByPt();
-      
+   
       if (jets.size() < 2) vetoEvent;
-      
+   
       const FourMomentum j0(jets[0].momentum());
       const FourMomentum j1(jets[1].momentum());
       if (fabs(j1.rapidity()) > 1.0 || fabs(j0.rapidity()) > 1.0) {
         vetoEvent;
       }
-    
+ 
       double mjj = FourMomentum(j0+j1).mass();
       _h_m_dijet->fill(mjj, weight);
     }
-    
-    
+ 
+ 
     /// Finalize
     void finalize() {
       scale(_h_m_dijet, crossSection()/sumOfWeights());
@@ -67,7 +67,7 @@
     //@{
     AIDA::IHistogram1D* _h_m_dijet;
     //@}
-    
+ 
   };
 
 

Modified: trunk/src/Analyses/CDF_2008_S8095620.cc
==============================================================================
--- trunk/src/Analyses/CDF_2008_S8095620.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2008_S8095620.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,26 +14,26 @@
   /// Implementation of CDF Run II Z + b-jet cross section paper
   class CDF_2008_S8095620 : public Analysis {
   public:
-        
+     
     /// Constructor.
     /// jet cuts: |eta| <= 1.5
     CDF_2008_S8095620()
-      : Analysis("CDF_2008_S8095620"), 
-        _Rjet(0.7), _JetPtCut(20.), _JetEtaCut(1.5),  
+      : Analysis("CDF_2008_S8095620"),
+        _Rjet(0.7), _JetPtCut(20.), _JetEtaCut(1.5),
         _sumWeightSelected(0.0)
-    { 
+    {
       setBeams(PROTON, ANTIPROTON);
     }
-    
+ 
 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       // Set up projections
       const FinalState fs(-3.6, 3.6);
       addProjection(fs, "FS");
-      // Create a final state with any e+e- or mu+mu- pair with 
+      // Create a final state with any e+e- or mu+mu- pair with
       // invariant mass 76 -> 106 GeV and ET > 20 (Z decay products)
       vector<pair<long,long> > vids;
       vids.push_back(make_pair(ELECTRON, POSITRON));
@@ -53,22 +53,22 @@
       _dSdNJet  = bookHistogram1D(3, 1, 1);
       _dSdNbJet = bookHistogram1D(4, 1, 1);
       _dSdZpT   = bookHistogram1D(5, 1, 1);
-    }  
-    
-   
+    }
+ 
+
     // Do the analysis
     void analyze(const Event& event) {
       // Check we have an l+l- pair that passes the kinematic cuts
       // Get the Z decay products (mu+mu- or e+e- pair)
       const InvMassFinalState& invMassFinalState = applyProjection<InvMassFinalState>(event, "INVFS");
       const ParticleVector&  ZDecayProducts =  invMassFinalState.particles();
-      
-      // make sure we have 2 Z decay products (mumu or ee) 
+   
+      // make sure we have 2 Z decay products (mumu or ee)
       if (ZDecayProducts.size() < 2) vetoEvent;
-      _sumWeightSelected += event.weight(); 
+      _sumWeightSelected += event.weight();
       // @todo: write out a warning if there are more than two decay products
       FourMomentum Zmom = ZDecayProducts[0].momentum() +  ZDecayProducts[1].momentum();
-      
+   
       // Put all b-quarks in a vector
       ParticleVector bquarks;
       foreach (const GenParticle* p, particles(event.genEvent())) {
@@ -76,18 +76,18 @@
           bquarks += Particle(*p);
         }
       }
-      
-      // Get jets 
+   
+      // Get jets
       const FastJets& jetpro = applyProjection<FastJets>(event, "Jets");
       getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jetpro.size() << endl;
-      
+   
       const PseudoJets& jets = jetpro.pseudoJetsByPt();
       getLog() << Log::DEBUG << "jetlist size = " << jets.size() << endl;
-      
+   
       int numBJet = 0;
       int numJet  = 0;
       // for each b-jet plot the ET and the eta of the jet, normalise to the total cross section at the end
-      // for each event plot N jet and pT(Z), normalise to the total cross section at the end 
+      // for each event plot N jet and pT(Z), normalise to the total cross section at the end
       for (PseudoJets::const_iterator jt = jets.begin(); jt != jets.end(); ++jt) {
         // select jets that pass the kinematic cuts
         if (jt->perp() > _JetPtCut && fabs(jt->rapidity()) <= _JetEtaCut) {
@@ -102,27 +102,27 @@
           } // end loop around b-jets
           if (bjet) {
             numBJet++;
-            _dSdET->fill(jt->perp(),event.weight()); 
-            _dSdETA->fill(jt->rapidity(),event.weight()); 
+            _dSdET->fill(jt->perp(),event.weight());
+            _dSdETA->fill(jt->rapidity(),event.weight());
           }
         }
       } // end loop around jets
-      
+   
       if(numJet > 0) _dSdNJet->fill(numJet,event.weight());
       if(numBJet > 0) {
         _dSdNbJet->fill(numBJet,event.weight());
-        _dSdZpT->fill(Zmom.pT(),event.weight()); 
-      } 
+        _dSdZpT->fill(Zmom.pT(),event.weight());
+      }
     }
-    
-  
+ 
+
 
     // Finalize
-    void finalize() {  
+    void finalize() {
       // normalise histograms
       // scale by 1 / the sum-of-weights of events that pass the Z cuts
       // since the cross sections are normalized to the inclusive
-      // Z cross sections. 
+      // Z cross sections.
       double Scale = 1.0;
       if (_sumWeightSelected != 0.0) Scale = 1.0/_sumWeightSelected;
       _dSdET->scale(Scale);
@@ -141,20 +141,20 @@
     double _JetPtCut;
     double _JetEtaCut;
     double _sumWeightSelected;
- 
+
     //@{
     /// Histograms
     AIDA::IHistogram1D* _dSdET;
     AIDA::IHistogram1D* _dSdETA;
-    AIDA::IHistogram1D* _dSdNJet; 
-    AIDA::IHistogram1D* _dSdNbJet; 
-    AIDA::IHistogram1D* _dSdZpT; 
+    AIDA::IHistogram1D* _dSdNJet;
+    AIDA::IHistogram1D* _dSdNbJet;
+    AIDA::IHistogram1D* _dSdZpT;
 
     //@}
 
   };
-  
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<CDF_2008_S8095620> plugin_CDF_2008_S8095620;
 

Modified: trunk/src/Analyses/CDF_2009_S8057893.cc
==============================================================================
--- trunk/src/Analyses/CDF_2009_S8057893.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2009_S8057893.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,7 +12,7 @@
   public:
 
     /// Constructor
-    CDF_2009_S8057893::CDF_2009_S8057893() 
+    CDF_2009_S8057893::CDF_2009_S8057893()
       : Analysis("CDF_2009_S8057893")
     {
       setBeams(PROTON, ANTIPROTON);
@@ -42,13 +42,13 @@
       // Okay, so here are the questions:
 
       //  * What |eta| and pT_min acceptance cuts were used?
-      //  * Is the "cone algorithm" JETCLU or MIDPOINT? You refer to the old 1992 paper that defines 
+      //  * Is the "cone algorithm" JETCLU or MIDPOINT? You refer to the old 1992 paper that defines
       //    JETCLU, but I thought Run II analyses were using the more IRC-safe midpoint algorithm.
       //  * Effective min j1, j2 Et values?
       //  * Definition of "require the two leading jets to be well-balanced in Et"?
-      //  * Definition of the complementary cones: per-jet for j1, j2? Otherwise, what is defn of 
-      //    "dijet axis" (since the two jet axes will not exactly match due to ISR and extra jets.) 
-      //    Complementary cones are same eta as jet, but phi +- 90 degrees? Radius of compl. cones 
+      //  * Definition of the complementary cones: per-jet for j1, j2? Otherwise, what is defn of
+      //    "dijet axis" (since the two jet axes will not exactly match due to ISR and extra jets.)
+      //    Complementary cones are same eta as jet, but phi +- 90 degrees? Radius of compl. cones
       //    = 1.0? Or defined in theta_c (not Lorentz invariant)?
       //  * kT of tracks rel to jet axis for all jets, j1 & j2, or just j1?
 

Modified: trunk/src/Analyses/CDF_2009_S8233977.cc
==============================================================================
--- trunk/src/Analyses/CDF_2009_S8233977.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2009_S8233977.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,31 +10,31 @@
 
   /* @brief CDF Run II min-bias cross-section
    * @author Hendrik Hoeth
-   * 
+   *
    * Measurement of \f$ \langle p_T \rangle \f$ vs. \f$ n_\text{ch} \f$,
    * the track \f$ p_T \f$ distribution, and the \f$ \sum E_T \f$ distribution.
    * Particles are selected within |eta|<1 and with pT>0.4 GeV.
    * There is no pT cut for the \f$ \sum E_T \f$ measurement.
-   * 
+   *
    * @par Run conditions
-   * 
+   *
    * @arg \f$ \sqrt{s} = \f$ 1960 GeV
    * @arg Run with generic QCD events.
    * @arg Set particles with c*tau > 10 mm stable
-   * 
-   */ 
+   *
+   */
   class CDF_2009_S8233977 : public Analysis {
   public:
 
     /// Constructor
     CDF_2009_S8233977()
       : Analysis("CDF_2009_S8233977")
-    { 
+    {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -50,7 +50,7 @@
     }
 
 
-    
+ 
     /// Do the analysis
     void analyze(const Event& evt) {
       // Trigger: need at least one charged particle in both -4.7 < eta < -3.7 and 3.7 < eta < 4.7
@@ -75,7 +75,7 @@
       foreach (const Particle& p, trackfs.particles()) {
         const double pT = p.momentum().pT() / GeV;
         _hist_pt_vs_multiplicity->fill(numParticles, pT, weight);
-        
+     
         // The weight for entries in the pT distribution should be weight/(pT*dPhi*dy).
         //
         // - dPhi = 2*PI
@@ -105,14 +105,14 @@
       _hist_sumEt->fill(sumEt, weight);
     }
 
-    
-    
+ 
+ 
     /// Normalize histos
     void finalize() {
       scale(_hist_sumEt, crossSection()/millibarn/(4*M_PI*sumOfWeights()));
       scale(_hist_pt, crossSection()/millibarn/sumOfWeights());
     }
-    
+ 
     //@}
 
 

Modified: trunk/src/Analyses/CDF_2009_S8383952.cc
==============================================================================
--- trunk/src/Analyses/CDF_2009_S8383952.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2009_S8383952.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_2009_S8383952()
-      : Analysis("CDF_2009_S8383952") 
+      : Analysis("CDF_2009_S8383952")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
@@ -68,7 +68,7 @@
     /// Normalise histograms etc., after the run
     void finalize() {
       scale(_h_xs, crossSection()/sumOfWeights());
-      // Data seems to have been normalized for the avg of the two sides 
+      // Data seems to have been normalized for the avg of the two sides
       // (+ve & -ve rapidity) rather than the sum, hence the 0.5:
       scale(_h_yZ, 0.5*crossSection()/sumOfWeights());
     }

Modified: trunk/src/Analyses/CDF_2009_S8436959.cc
==============================================================================
--- trunk/src/Analyses/CDF_2009_S8436959.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/CDF_2009_S8436959.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
 
     /// Constructor
     CDF_2009_S8436959()
-      : Analysis("CDF_2009_S8436959") 
+      : Analysis("CDF_2009_S8436959")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
@@ -64,7 +64,7 @@
       if (photons.size() != 1) {
         vetoEvent;
       }
-      
+   
       _h_Et_photon->fill(photons[0].momentum().Et(), weight);
     }
 

Modified: trunk/src/Analyses/D0_1996_S3214044.cc
==============================================================================
--- trunk/src/Analyses/D0_1996_S3214044.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_1996_S3214044.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,13 +18,13 @@
     //@{
 
     /// Constructor
-    D0_1996_S3214044() : Analysis("D0_1996_S3214044") 
+    D0_1996_S3214044() : Analysis("D0_1996_S3214044")
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(false);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -34,7 +34,7 @@
       addProjection(fs, "FS");
       /// @todo Use correct jet algorithm
       addProjection(FastJets(fs, FastJets::D0ILCONE, 0.7, 20.0*GeV), "ConeJets");
-      
+   
       _h_3j_x3 = bookHistogram1D(1, 1, 1);
       _h_3j_x5 = bookHistogram1D(2, 1, 1);
       _h_3j_costheta3 = bookHistogram1D(3, 1, 1);
@@ -42,7 +42,7 @@
       _h_3j_mu34 = bookHistogram1D(5, 1, 1);
       _h_3j_mu35 = bookHistogram1D(6, 1, 1);
       _h_3j_mu45 = bookHistogram1D(7, 1, 1);
-      
+   
       _h_4j_x3 = bookHistogram1D(8, 1, 1);
       _h_4j_x4 = bookHistogram1D(9, 1, 1);
       _h_4j_x5 = bookHistogram1D(10, 1, 1);
@@ -65,20 +65,20 @@
       _h_4j_mu56 = bookHistogram1D(27, 1, 1);
       _h_4j_theta_BZ = bookHistogram1D(28, 1, 1);
       _h_4j_costheta_NR = bookHistogram1D(29, 1, 1);
-      
+   
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       Jets jets_in;
       foreach (const Jet& jet, applyProjection<FastJets>(event, "ConeJets").jetsByEt()) {
         if (fabs(jet.momentum().eta()) < 3.0) {
           jets_in.push_back(jet);
         }
       }
-      
+   
       Jets jets_isolated;
       for (size_t i = 0; i < jets_in.size(); ++i) {
         bool isolated=true;
@@ -92,11 +92,11 @@
           jets_isolated.push_back(jets_in[i]);
         }
       }
-      
+   
       if (jets_isolated.size() == 0 || jets_isolated[0].momentum().Et() < 60.0*GeV) {
         vetoEvent;
       }
-      
+   
       if (jets_isolated.size() > 2) _threeJetAnalysis(jets_isolated, weight);
       if (jets_isolated.size() > 3) _fourJetAnalysis(jets_isolated, weight);
     }
@@ -133,7 +133,7 @@
       normalize(_h_4j_theta_BZ, 1.0);
       normalize(_h_4j_costheta_NR, 1.0);
     }
-    
+ 
     //@}
 
 
@@ -141,15 +141,15 @@
 
     /// @name Helper functions
     //@{
-    
-    void _threeJetAnalysis(const Jets& jets, const double& weight) {    
+ 
+    void _threeJetAnalysis(const Jets& jets, const double& weight) {
       // >=3 jet events
       FourMomentum jjj(jets[0].momentum()+jets[1].momentum()+jets[2].momentum());
       const double sqrts = jjj.mass();
       if (sqrts<200*GeV) {
         return;
       }
-    
+ 
       LorentzTransform cms_boost(-jjj.boostVector());
       vector<FourMomentum> jets_boosted;
       foreach (Jet jet, jets) {
@@ -159,12 +159,12 @@
       FourMomentum p3(jets_boosted[0]);
       FourMomentum p4(jets_boosted[1]);
       FourMomentum p5(jets_boosted[2]);
-      
+   
       Vector3 beam1(0.0, 0.0, 1.0);
       Vector3 p1xp3 = beam1.cross(p3.vector3());
       Vector3 p4xp5 = p4.vector3().cross(p5.vector3());
       const double cospsi = p1xp3.dot(p4xp5)/p1xp3.mod()/p4xp5.mod();
-      
+   
       _h_3j_x3->fill(2.0*p3.E()/sqrts, weight);
       _h_3j_x5->fill(2.0*p5.E()/sqrts, weight);
       _h_3j_costheta3->fill(fabs(cos(p3.theta())), weight);
@@ -173,14 +173,14 @@
       _h_3j_mu35->fill(FourMomentum(p3+p5).mass()/sqrts, weight);
       _h_3j_mu45->fill(FourMomentum(p4+p5).mass()/sqrts, weight);
     }
-    
-    
-    void _fourJetAnalysis(const Jets& jets, const double& weight) {    
+ 
+ 
+    void _fourJetAnalysis(const Jets& jets, const double& weight) {
       // >=4 jet events
       FourMomentum jjjj(jets[0].momentum() + jets[1].momentum() + jets[2].momentum()+ jets[3].momentum());
       const double sqrts = jjjj.mass();
       if (sqrts < 200*GeV) return;
-      
+   
       LorentzTransform cms_boost(-jjjj.boostVector());
       vector<FourMomentum> jets_boosted;
       foreach (Jet jet, jets) {
@@ -191,13 +191,13 @@
       FourMomentum p4(jets_boosted[1]);
       FourMomentum p5(jets_boosted[2]);
       FourMomentum p6(jets_boosted[3]);
-      
+   
       Vector3 p3xp4 = p3.vector3().cross(p4.vector3());
       Vector3 p5xp6 = p5.vector3().cross(p6.vector3());
       const double costheta_BZ = p3xp4.dot(p5xp6)/p3xp4.mod()/p5xp6.mod();
       const double costheta_NR = (p3.vector3()-p4.vector3()).dot(p5.vector3()-p6.vector3())/
         (p3.vector3()-p4.vector3()).mod()/(p5.vector3()-p6.vector3()).mod();
-      
+   
       _h_4j_x3->fill(2.0*p3.E()/sqrts, weight);
       _h_4j_x4->fill(2.0*p4.E()/sqrts, weight);
       _h_4j_x5->fill(2.0*p5.E()/sqrts, weight);
@@ -220,9 +220,9 @@
       _h_4j_mu56->fill(FourMomentum(p5+p6).mass()/sqrts, weight);
       _h_4j_theta_BZ->fill(acos(costheta_BZ)/degree, weight);
       _h_4j_costheta_NR->fill(costheta_NR, weight);
-      
+   
     }
-       
+    
 
   private:
 
@@ -236,7 +236,7 @@
     AIDA::IHistogram1D *_h_3j_mu34;
     AIDA::IHistogram1D *_h_3j_mu35;
     AIDA::IHistogram1D *_h_3j_mu45;
-    
+ 
     AIDA::IHistogram1D *_h_4j_x3;
     AIDA::IHistogram1D *_h_4j_x4;
     AIDA::IHistogram1D *_h_4j_x5;
@@ -261,10 +261,10 @@
     AIDA::IHistogram1D *_h_4j_costheta_NR;
     //@}
 
-  }; 
-    
-    
+  };
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_1996_S3214044> plugin_D0_1996_S3214044;
-    
+ 
 }

Modified: trunk/src/Analyses/D0_1996_S3324664.cc
==============================================================================
--- trunk/src/Analyses/D0_1996_S3324664.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_1996_S3324664.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -20,8 +20,8 @@
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(false);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -41,23 +41,23 @@
 
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       Jets jets;
       foreach (const Jet& jet, applyProjection<FastJets>(event, "ConeJets").jets()) {
         if (fabs(jet.momentum().eta()) < 3.0) {
           jets.push_back(jet);
         }
       }
-      
+   
       if (jets.size() < 2) {
         vetoEvent;
       }
-    
+ 
       FourMomentum minjet = jets[0].momentum();
       FourMomentum maxjet = jets[1].momentum();
       double mineta = minjet.eta();
       double maxeta = maxjet.eta();
-    
+ 
       foreach(const Jet& jet, jets) {
         double eta = jet.momentum().eta();
         if (eta < mineta) {
@@ -69,33 +69,33 @@
           maxeta = eta;
         }
       }
-      
+   
       if (minjet.Et()<50*GeV && maxjet.Et()<50.0*GeV) {
         vetoEvent;
       }
-      
+   
       double deta = maxjet.eta()-minjet.eta();
       double dphi = mapAngle0To2Pi(maxjet.phi()-minjet.phi());
-      
+   
       _h_deta->fill(deta, weight);
       _h_dphi.fill(deta, 1.0-dphi/M_PI, weight);
       _h_cosdphi_deta->fill(deta, cos(M_PI-dphi), weight);
-      
+   
     }
-    
-    
+ 
+ 
     void finalize() {
       // Normalised to #events
-      normalize(_h_deta, 8830.0); 
-      
+      normalize(_h_deta, 8830.0);
+   
       // I have no idea what this is normalised to... in the paper it says unity!
       /// @todo Understand this!
       foreach (IHistogram1D* histo, _h_dphi.getHistograms()) {
         normalize(histo, 0.0798);
       }
-      
+   
     }
-    
+ 
     //@}
 
 

Modified: trunk/src/Analyses/D0_2001_S4674421.cc
==============================================================================
--- trunk/src/Analyses/D0_2001_S4674421.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2001_S4674421.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -26,17 +26,17 @@
     //  - @c _mZmax = upper Z mass cut used in the publication analysis
     D0_2001_S4674421()
       : Analysis("D0_2001_S4674421"),
-        _mwmz(0.8820), _brwenu(0.1073), _brzee(0.033632), 
+        _mwmz(0.8820), _brwenu(0.1073), _brzee(0.033632),
         _mZmin(75.*GeV), _mZmax(105.*GeV)
-    { 
+    {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
-    }    
-    
-    
+    }
+ 
+ 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       // Final state projection
       FinalState fs(-5.0, 5.0); // corrected for detector acceptance
@@ -46,12 +46,12 @@
       LeadingParticlesFinalState eeFS(fs, -2.5, 2.5, 0.); //20.);
       eeFS.addParticleIdPair(ELECTRON);
       addProjection(eeFS, "eeFS");
-      
+   
       // W- -> e- nu_e~
       LeadingParticlesFinalState enuFS(fs, -2.5, 2.5, 0.); //25.);
       enuFS.addParticleId(ELECTRON).addParticleId(NU_EBAR);
       addProjection(enuFS, "enuFS");
-      
+   
       // W+ -> e+ nu_e
       LeadingParticlesFinalState enubFS(fs, -2.5, 2.5, 0.); //25.);
       enubFS.addParticleId(POSITRON).addParticleId(NU_E);
@@ -94,10 +94,10 @@
           _h_dsigdpt_z->fill(pmom.pT()/GeV, weight);
           _h_dsigdpt_scaled_z->fill(pmom.pT()/GeV * _mwmz, weight);
         }
-      } else { 
+      } else {
         // There is no Z -> ee candidate... so this must be a W event, right?
         const LeadingParticlesFinalState& enuFS = applyProjection<LeadingParticlesFinalState>(event, "enuFS");
-        const LeadingParticlesFinalState& enubFS = applyProjection<LeadingParticlesFinalState>(event, "enubFS"); 
+        const LeadingParticlesFinalState& enubFS = applyProjection<LeadingParticlesFinalState>(event, "enubFS");
         static size_t Wcount = 0;
 
         // Fill W pT distributions
@@ -119,7 +119,7 @@
 
 
 
-    void finalize() { 
+    void finalize() {
       // Get cross-section per event (i.e. per unit weight) from generator
       const double xSecPerEvent = crossSection()/picobarn / sumOfWeights();
 
@@ -134,7 +134,7 @@
       const double zpt_scaled_integral = integral(_h_dsigdpt_scaled_z);
 
       // Divide and scale ratio histos
-      AIDA::IDataPointSet* div = histogramFactory().divide(histoDir() + "/d02-x01-y01", *_h_dsigdpt_w, *_h_dsigdpt_scaled_z); 
+      AIDA::IDataPointSet* div = histogramFactory().divide(histoDir() + "/d02-x01-y01", *_h_dsigdpt_w, *_h_dsigdpt_scaled_z);
       div->setTitle("$[\\mathrm{d}\\sigma/\\mathrm{d}p_\\perp(W)] / [\\mathrm{d}\\sigma/\\mathrm{d}(p_\\perp(Z) \\cdot M_W/M_Z)]$");
       if (xSecW == 0 || wpt_integral == 0 || xSecZ == 0 || zpt_scaled_integral == 0) {
         getLog() << Log::WARN << "Not filling ratio plot because input histos are empty" << endl;
@@ -160,18 +160,18 @@
 
 
     //@}
-    
+ 
   private:
-    
-    /// Analysis used ratio of mW/mZ 
+ 
+    /// Analysis used ratio of mW/mZ
     const double _mwmz;
-    
+ 
     /// Ratio of \f$ BR(W->e,nu) \f$ used in the publication analysis
     const double _brwenu;
-    
+ 
     /// Ratio of \f$ \text{BR}( Z \to e^+ e^-) \f$ used in the publication analysis
     const double _brzee;
-    
+ 
     /// Invariant mass cuts for Z boson candidate (75 GeV < mZ < 105 GeV)
     const double _mZmin, _mZmax;
 
@@ -179,13 +179,13 @@
     // Event counters for cross section normalizations
     double _eventsFilledW;
     double _eventsFilledZ;
-    
+ 
     //@{
     /// Histograms
     AIDA::IHistogram1D* _h_dsigdpt_w;
     AIDA::IHistogram1D* _h_dsigdpt_z;
     AIDA::IHistogram1D* _h_dsigdpt_scaled_z;
-   //@}    
+   //@}
 
   };
 

Modified: trunk/src/Analyses/D0_2004_S5992206.cc
==============================================================================
--- trunk/src/Analyses/D0_2004_S5992206.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2004_S5992206.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,18 +10,18 @@
 
   /* @brief D0 Run II jet analysis
    * @author Lars Sonnenschein
-   * 
+   *
    * Measurement of angular correlations in di-jet events.
-   * 
-   * 
+   *
+   *
    * @par Run conditions
-   * 
+   *
    * @arg \f$ \sqrt{s} = \f$ 1960 GeV
    * @arg Run with generic QCD events.
    * @arg Several \f$ p_\perp^\text{min} \f$ cutoffs are probably required to fill the histograms:
    *   @arg \f$ p_\perp^\text{min} = \f$ 50, 75, 100, 150 GeV for the four pT ranges respecively
-   * 
-   */ 
+   *
+   */
   class D0_2004_S5992206 : public Analysis {
 
   public:
@@ -47,7 +47,7 @@
       addProjection(fs, "FS");
       addProjection(FastJets(fs, FastJets::D0ILCONE, 0.7, 6*GeV), "Jets");
       addProjection(TotalVisibleMomentum(fs), "CalMET");
-      
+   
       // Veto neutrinos, and muons with pT above 1.0 GeV
       VetoedFinalState vfs(fs);
       vfs.vetoNeutrinos();
@@ -68,10 +68,10 @@
       // Analyse and print some info
       const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
       getLog() << Log::DEBUG << "Jet multiplicity before any pT cut = " << jetpro.size() << endl;
-      
+   
       const Jets jets  = jetpro.jetsByPt(40.0*GeV);
       if (jets.size() >= 2) {
-        getLog() << Log::DEBUG << "Jet multiplicity after pT > 40 GeV cut = " << jets.size() << endl; 
+        getLog() << Log::DEBUG << "Jet multiplicity after pT > 40 GeV cut = " << jets.size() << endl;
       } else {
         vetoEvent;
       }
@@ -82,13 +82,13 @@
       }
       getLog() << Log::DEBUG << "Jet eta and pT requirements fulfilled" << endl;
       const double pT1 = jets[0].momentum().pT();
-      
+   
       const TotalVisibleMomentum& caloMissEt = applyProjection<TotalVisibleMomentum>(event, "CalMET");
       getLog() << Log::DEBUG << "Missing Et = " << caloMissEt.momentum().pT()/GeV << endl;
       if (caloMissEt.momentum().pT() > 0.7*pT1) {
         vetoEvent;
       }
-      
+   
       if (pT1/GeV >= 75.0) {
         const double weight = event.weight();
         const double dphi = deltaPhi(jets[0].momentum().phi(), jets[1].momentum().phi());
@@ -102,19 +102,19 @@
           _histJetAzimuth_pTmax180_->fill(dphi, weight);
         }
       }
-      
+   
     }
-    
-    
+ 
+ 
     // Finalize
-    void finalize() { 
+    void finalize() {
       // Normalize histograms to unit area
       normalize(_histJetAzimuth_pTmax75_100);
       normalize(_histJetAzimuth_pTmax100_130);
       normalize(_histJetAzimuth_pTmax130_180);
       normalize(_histJetAzimuth_pTmax180_);
     }
-    
+ 
     //@}
 
 
@@ -130,8 +130,8 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2004_S5992206> plugin_D0_2004_S5992206;
 

Modified: trunk/src/Analyses/D0_2006_S6438750.cc
==============================================================================
--- trunk/src/Analyses/D0_2006_S6438750.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2006_S6438750.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -24,19 +24,19 @@
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
-    } 
-    
+    }
+ 
     //@}
 
 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     void init() {
       // General FS for photon isolation
       FinalState fs(-1.5, 1.5);
       addProjection(fs, "AllFS");
-      
+   
       // Get leading photon
       LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
       photonfs.addParticleId(PHOTON);
@@ -45,9 +45,9 @@
       // Book histograms
       _h_pTgamma = bookHistogram1D(1, 1, 1);
     }
-    
+ 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event& event) {
 
       // Get the photon
@@ -61,13 +61,13 @@
         getLog() << Log::DEBUG << "Leading photon has pT < 23 GeV: " << photon.pT()/GeV << endl;
         vetoEvent;
       }
-      
+   
       // Get all other particles
       const FinalState& fs = applyProjection<FinalState>(event, "AllFS");
       if (fs.empty()) {
         vetoEvent;
       }
-      
+   
       // Isolate photon by ensuring that a 0.4 cone around it contains less than 7% of the photon's energy
       const double egamma = photon.E();
       // Energy inside R = 0.2
@@ -89,7 +89,7 @@
         getLog() << Log::DEBUG << "Vetoing event because photon is insufficiently isolated" << endl;
         vetoEvent;
       }
-      
+   
       // Veto if leading jet is outside plotted rapidity regions
       const double eta_gamma = fabs(photon.pseudorapidity());
       if (eta_gamma > 0.9) {
@@ -97,24 +97,24 @@
                  << "|eta_gamma| = " << eta_gamma << endl;
         vetoEvent;
       }
-      
+   
       // Fill histo
       const double weight = event.weight();
-      _h_pTgamma->fill(photon.pT(), weight); 
+      _h_pTgamma->fill(photon.pT(), weight);
     }
-    
-    
+ 
+ 
 
     // Finalize
     void finalize() {
       /// @todo Generator cross-section from Pythia gives ~7500, vs. expected 2988!
       //normalize(_h_pTgamma, 2988.4869);
-      
+   
       const double lumi_gen = sumOfWeights()/crossSection();
       // Divide by effective lumi, plus rapidity bin width of 1.8
       scale(_h_pTgamma, 1/lumi_gen * 1/1.8);
     }
-    
+ 
     //@}
 
 

Modified: trunk/src/Analyses/D0_2007_S7075677.cc
==============================================================================
--- trunk/src/Analyses/D0_2007_S7075677.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2007_S7075677.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,15 +16,15 @@
   public:
 
     /// Default constructor.
-    D0_2007_S7075677() : Analysis("D0_2007_S7075677")  
+    D0_2007_S7075677() : Analysis("D0_2007_S7075677")
     {
       // Run II Z rapidity
       setBeams(PROTON, ANTIPROTON);
     }
-    
+ 
 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     /// Book histograms
     void init() {
@@ -42,10 +42,10 @@
     }
 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event & e) {
       const double weight = e.weight();
-      
+   
       const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
       if (zfinder.particles().size() == 1) {
         const ParticleVector& el(zfinder.constituentsFinalState().particles());
@@ -58,11 +58,11 @@
         getLog() << Log::DEBUG << "No unique lepton pair found." << endl;
       }
     }
-    
-    
+ 
+ 
     // Finalize
     void finalize() {
-      // Data seems to have been normalized for the avg of the two sides 
+      // Data seems to have been normalized for the avg of the two sides
       // (+ve & -ve rapidity) rather than the sum, hence the 0.5:
       normalize(_h_yZ, 0.5);
     }

Modified: trunk/src/Analyses/D0_2008_S6879055.cc
==============================================================================
--- trunk/src/Analyses/D0_2008_S6879055.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2008_S6879055.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,44 +15,44 @@
   /// @brief Measurement of the ratio sigma(Z/gamma* + n jets)/sigma(Z/gamma*)
   class D0_2008_S6879055 : public Analysis {
   public:
-    
+ 
     /// Default constructor.
     D0_2008_S6879055() : Analysis("D0_2008_S6879055")
     {
       setBeams(PROTON, ANTIPROTON);
-    } 
+    }
 
 
     /// @name Analysis methods
-    //@{ 
-    
+    //@{
+ 
     // Book histograms
     void init() {
       // Basic final state
       FinalState fs(-5.0, 5.0);
       addProjection(fs, "FS");
-      
+   
       // Leading electrons in tracking acceptance
       LeadingParticlesFinalState lpfs(fs, -1.1, 1.1, 25*GeV);
       lpfs.addParticleId(ELECTRON).addParticleId(POSITRON);
       addProjection(lpfs, "LeadingElectronsFS");
-      
+   
       // Invariant mass selection around Z pole
       InvMassFinalState electronsFromZ(lpfs, make_pair(ELECTRON, POSITRON), 75*GeV, 105*GeV);
       addProjection(electronsFromZ,"ElectronsFromZ");
-      
+   
       // Vetoed FS for jets
       VetoedFinalState vfs(fs);
       // Add particle/antiparticle vetoing
       vfs.vetoNeutrinos();
-      // Veto the electrons from Z decay  
+      // Veto the electrons from Z decay
       vfs.addVetoOnThisFinalState(electronsFromZ);
       addProjection(vfs, "JetFS");
-      
+   
       // Jet finder
       FastJets jets(vfs, FastJets::D0ILCONE, 0.5, 20.0*GeV);
       addProjection(jets, "Jets");
-      
+   
       // Vertex
       PVertex vertex;
       addProjection(vertex, "PrimaryVertex");
@@ -62,19 +62,19 @@
       _pTjet2 = bookHistogram1D(3, 1, 1);
       _pTjet3 = bookHistogram1D(4, 1, 1);
     }
-    
-    
-    
-    /// Do the analysis 
+ 
+ 
+ 
+    /// Do the analysis
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       // Skip if the event is empty
       const FinalState& fs = applyProjection<FinalState>(event, "FS");
       if (fs.empty()) {
         vetoEvent;
       }
-      
+   
       // Check that the primary vertex is within 60 cm in z from (0,0,0)
       const PVertex& vertex = applyProjection<PVertex>(event, "PrimaryVertex");
       getLog() << Log::DEBUG << "Primary vertex is at " << vertex.position()/cm << " cm" << endl;
@@ -82,7 +82,7 @@
         getLog() << Log::DEBUG << "Vertex z-position " << vertex.position().z()/cm << " is outside cuts" << endl;
         vetoEvent;
       }
-      
+   
       // Find the Z candidates
       const InvMassFinalState& invmassfs = applyProjection<InvMassFinalState>(event, "ElectronsFromZ");
       // If there is no Z candidate in the FinalState, skip the event
@@ -90,7 +90,7 @@
         getLog() << Log::DEBUG << "No Z candidate found" << endl;
         vetoEvent;
       }
-      
+   
       // Now build the list of jets on a FS without the electrons from Z
       // Additional cuts on jets: |eta| < 2.5 and dR(j,leading electron) > 0.4
       const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
@@ -100,25 +100,25 @@
         const double jeta = j.momentum().pseudorapidity();
         const double jphi = j.momentum().azimuthalAngle();
         if (fabs(jeta) > 2.5) continue;
-        
+     
         FourMomentum e0 = invmassfs.particles()[0].momentum();
         FourMomentum e1 = invmassfs.particles()[1].momentum();
         const double e0eta = e0.pseudorapidity();
         const double e0phi = e0.azimuthalAngle();
         if (deltaR(e0eta, e0phi, jeta, jphi) < 0.4) continue;
-        
+     
         const double e1eta = e1.pseudorapidity();
         const double e1phi = e1.azimuthalAngle();
         if (deltaR(e1eta, e1phi, jeta, jphi) < 0.4) continue;
-        
+     
         // If we pass all cuts...
         finaljet_list.push_back(j.momentum());
       }
       getLog() << Log::DEBUG << "Num jets passing = " << finaljet_list.size() << endl;
-      
+   
       // For normalisation of crossSection data (includes events with no jets passing cuts)
       _crossSectionRatio->fill(0, weight);
-      
+   
       // Fill jet pT and multiplicities
       if (finaljet_list.size() >= 1) {
         _crossSectionRatio->fill(1, weight);
@@ -136,14 +136,14 @@
         _crossSectionRatio->fill(4, weight);
       }
     }
-    
-    
-    
+ 
+ 
+ 
     /// Finalize
     void finalize() {
       // Now divide by the inclusive result
       _crossSectionRatio->scale(1.0/_crossSectionRatio->binHeight(0));
-      
+   
       // Normalise jet pT's to integral of data
       // there is no other way to do this, because these quantities are not
       // detector corrected
@@ -151,7 +151,7 @@
       normalize(_pTjet2, 1461.5);
       normalize(_pTjet3, 217.0);
     }
-    
+ 
     //@}
 
 
@@ -167,9 +167,9 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2008_S6879055> plugin_D0_2008_S6879055;
-  
+
 }

Modified: trunk/src/Analyses/D0_2008_S7554427.cc
==============================================================================
--- trunk/src/Analyses/D0_2008_S7554427.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2008_S7554427.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -21,11 +21,11 @@
     {
       // Run II Z pT
       setBeams(PROTON, ANTIPROTON);
-    } 
-    
-    
+    }
+ 
+ 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     /// Book histograms
     void init() {
@@ -39,7 +39,7 @@
 
 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event & e) {
       const double weight = e.weight();
 
@@ -55,17 +55,17 @@
       else {
         getLog() << Log::DEBUG << "no unique lepton pair found." << endl;
       }
-      
+   
     }
-    
-    
-    
+ 
+ 
+ 
     // Finalize
     void finalize() {
       normalize(_h_ZpT);
       normalize(_h_forward_ZpT);
     }
-    
+ 
     //@}
 
 
@@ -79,8 +79,8 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2008_S7554427> plugin_D0_2008_S7554427;
 

Modified: trunk/src/Analyses/D0_2008_S7662670.cc
==============================================================================
--- trunk/src/Analyses/D0_2008_S7662670.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2008_S7662670.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -27,21 +27,21 @@
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
-    
+ 
     //@}
 
 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
-    void init() 
+    void init()
     {
-      
+   
       // Full final state
       FinalState fs(-5.0, 5.0);
       addProjection(fs, "FS");
 
-      // Jets      
+      // Jets
       FastJets jetpro(fs, FastJets::D0ILCONE, 0.7, 6*GeV);
       addProjection(jetpro, "Jets");
 
@@ -53,20 +53,20 @@
       _h_dsigdptdy_y16_20 = bookHistogram1D(5, 1, 1);
       _h_dsigdptdy_y20_24 = bookHistogram1D(6, 1, 1);
     }
-    
-    
-    
-    /// Do the analysis 
+ 
+ 
+ 
+    /// Do the analysis
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       // Skip if the event is empty
       const FinalState& fs = applyProjection<FinalState>(event, "FS");
       if (fs.empty()) {
         getLog() << Log::DEBUG << "Empty event!" << endl;
         vetoEvent;
       }
-      
+   
       // Find the jets
       const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
       // If there are no jets, skip the event
@@ -80,7 +80,7 @@
         const double pt = j.momentum().pT();
         const double y = fabs(j.momentum().rapidity());
         if (pt/GeV > 50) {
-          getLog() << Log::TRACE << "Filling histos: pT = " << pt/GeV 
+          getLog() << Log::TRACE << "Filling histos: pT = " << pt/GeV
                    << ", |y| = " << y << endl;
           if (y < 0.4) {
             _h_dsigdptdy_y00_04->fill(pt/GeV, weight);
@@ -97,9 +97,9 @@
           }
         }
       }
-      
+   
     }
-    
+ 
 
     /// Finalize
     void finalize() {
@@ -129,10 +129,10 @@
     //@}
 
   };
-    
-    
-    
+ 
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2008_S7662670> plugin_D0_2008_S7662670;
-  
+
 }

Modified: trunk/src/Analyses/D0_2008_S7719523.cc
==============================================================================
--- trunk/src/Analyses/D0_2008_S7719523.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2008_S7719523.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,7 +11,7 @@
 
 
   /// @brief Measurement of isolated gamma + jet + X differential cross-sections
-  /// Inclusive isolated gamma + jet cross-sections, differential in pT(gamma), for 
+  /// Inclusive isolated gamma + jet cross-sections, differential in pT(gamma), for
   /// various photon and jet rapidity bins.
   ///
   /// @author Andy Buckley
@@ -29,25 +29,25 @@
     {
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
-    } 
-    
+    }
+ 
     //@}
 
 
     /// @name Analysis methods
-    //@{ 
-    
+    //@{
+ 
     /// Set up projections and book histograms
     void init() {
       // General FS
       FinalState fs(-5.0, 5.0);
       addProjection(fs, "FS");
-      
+   
       // Get leading photon
       LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
       photonfs.addParticleId(PHOTON);
       addProjection(photonfs, "LeadingPhoton");
-      
+   
       // FS for jets excludes the leading photon
       VetoedFinalState vfs(fs);
       vfs.addVetoOnThisFinalState(photonfs);
@@ -57,12 +57,12 @@
       _h_central_same_cross_section = bookHistogram1D(1, 1, 1);
       _h_central_opp_cross_section  = bookHistogram1D(2, 1, 1);
       _h_forward_same_cross_section = bookHistogram1D(3, 1, 1);
-      _h_forward_opp_cross_section  = bookHistogram1D(4, 1, 1); 
+      _h_forward_opp_cross_section  = bookHistogram1D(4, 1, 1);
     }
-    
-    
+ 
+ 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event& event) {
       const double weight = event.weight();
 
@@ -77,13 +77,13 @@
         getLog() << Log::DEBUG << "Leading photon has pT < 30 GeV: " << photon.pT()/GeV << endl;
         vetoEvent;
       }
-      
+   
       // Get all charged particles
       const FinalState& fs = applyProjection<FinalState>(event, "JetFS");
       if (fs.empty()) {
         vetoEvent;
       }
-      
+   
       // Isolate photon by ensuring that a 0.4 cone around it contains less than 7% of the photon's energy
       const double egamma = photon.E();
       double econe = 0.0;
@@ -97,8 +97,8 @@
           }
         }
       }
-      
-      
+   
+   
       /// @todo Allow proj creation w/o FS as ctor arg, so that calc can be used more easily.
       FastJets jetpro(fs, FastJets::D0ILCONE, 0.7); //< @todo This fs arg makes no sense!
       jetpro.calc(fs.particles());
@@ -111,29 +111,29 @@
           isolated_jets.push_back(j);
         }
       }
-      
-      getLog() << Log::DEBUG << "Num jets after isolation and pT cuts = " 
+   
+      getLog() << Log::DEBUG << "Num jets after isolation and pT cuts = "
                << isolated_jets.size() << endl;
       if (isolated_jets.empty()) {
         getLog() << Log::DEBUG << "No jets pass cuts" << endl;
         vetoEvent;
       }
-      
+   
       // Sort by pT and get leading jet
       sort(isolated_jets.begin(), isolated_jets.end(), cmpJetsByPt);
       const FourMomentum leadingJet = isolated_jets.front().momentum();
       int photon_jet_sign = sign( leadingJet.rapidity() * photon.rapidity() );
-      
+   
       // Veto if leading jet is outside plotted rapidity regions
       const double abs_y1 = fabs(leadingJet.rapidity());
       if (inRange(abs_y1, 0.8, 1.5) || abs_y1 > 2.5) {
-        getLog() << Log::DEBUG << "Leading jet falls outside acceptance range; |y1| = " 
+        getLog() << Log::DEBUG << "Leading jet falls outside acceptance range; |y1| = "
                  << abs_y1 << endl;
         vetoEvent;
       }
-      
+   
       // Fill histos
-      if (fabs(leadingJet.rapidity()) < 0.8) { 
+      if (fabs(leadingJet.rapidity()) < 0.8) {
         if (photon_jet_sign >= 1) {
           _h_central_same_cross_section->fill(photon.pT(), weight);
         } else {
@@ -143,29 +143,29 @@
         if (photon_jet_sign >= 1) {
           _h_forward_same_cross_section->fill(photon.pT(), weight);
         } else {
-          _h_forward_opp_cross_section->fill(photon.pT(), weight); 
+          _h_forward_opp_cross_section->fill(photon.pT(), weight);
         }
       }
-      
+   
     }
-    
-    
-    
+ 
+ 
+ 
     /// Finalize
     void finalize() {
       const double lumi_gen = sumOfWeights()/crossSection();
       const double dy_photon = 2.0;
       const double dy_jet_central = 1.6;
       const double dy_jet_forward = 2.0;
-      
+   
       // Cross-section ratios (6 plots)
       // Central/central and forward/forward ratios
       AIDA::IHistogramFactory& hf = histogramFactory();
       const string dir = histoDir();
-      
+   
       hf.divide(dir + "/d05-x01-y01", *_h_central_opp_cross_section, *_h_central_same_cross_section);
       hf.divide(dir + "/d08-x01-y01", *_h_forward_opp_cross_section, *_h_forward_same_cross_section);
-      
+   
       // Central/forward ratio combinations
       hf.divide(dir + "/d06-x01-y01", *_h_central_same_cross_section,
                 *_h_forward_same_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
@@ -175,14 +175,14 @@
                 *_h_forward_opp_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
       hf.divide(dir + "/d10-x01-y01", *_h_central_opp_cross_section,
                 *_h_forward_opp_cross_section)->scale(dy_jet_forward/dy_jet_central, 1);
-      
+   
       // Use generator cross section for remaining histograms
       scale(_h_central_same_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_central);
       scale(_h_central_opp_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_central);
       scale(_h_forward_same_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_forward);
       scale(_h_forward_opp_cross_section, 1.0/lumi_gen * 1.0/dy_photon * 1.0/dy_jet_forward);
     }
-    
+ 
     //@}
 
   private:
@@ -197,9 +197,9 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2008_S7719523> plugin_D0_2008_S7719523;
-  
+
 }

Modified: trunk/src/Analyses/D0_2008_S7837160.cc
==============================================================================
--- trunk/src/Analyses/D0_2008_S7837160.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2008_S7837160.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -23,25 +23,25 @@
     {
       // Run II W charge asymmetry
       setBeams(PROTON, ANTIPROTON);
-    } 
-    
-    
+    }
+ 
+ 
     /// @name Analysis methods
-    //@{ 
-    
+    //@{
+ 
     // Book histograms and set up projections
     void init() {
       // Leading electrons
       FinalState fs(-5.0, 5.0);
-      
+   
       LeadingParticlesFinalState efs(fs);
       efs.addParticleId(ELECTRON).addParticleId(POSITRON);
       addProjection(efs, "WDecayE");
-      
+   
       LeadingParticlesFinalState nufs(fs);
       nufs.addParticleId(NU_E).addParticleId(NU_EBAR);
       addProjection(nufs, "WDecayNu");
-      
+   
       // Final state w/o electron
       IdentifiedFinalState ifs(fs);
       ifs.acceptId(PHOTON);
@@ -56,24 +56,24 @@
       _h_dsigplus_deta_25     = bookHistogram1D("/dsigplus_deta_25", 10, 0.0, 3.2);
       _h_dsigminus_deta_25    = bookHistogram1D("/dsigminus_deta_25", 10, 0.0, 3.2);
     }
-    
-    
-    /// Do the analysis 
+ 
+ 
+    /// Do the analysis
     void analyze(const Event & event) {
       const double weight = event.weight();
-      
+   
       /// @todo Use WFinder projection (includes photon summing)
 
       // Find the W decay products
       const FinalState& efs = applyProjection<FinalState>(event, "WDecayE");
       const FinalState& nufs = applyProjection<FinalState>(event, "WDecayNu");
-      
+   
       // If there is no e/nu_e pair in the FinalState, skip the event
       if (efs.particles().size() < 1 || nufs.particles().size() < 1) {
         getLog() << Log::DEBUG << "No e/nu_e pair found " << endl;
         vetoEvent;
       }
-      
+   
       // Identify leading nu and electron
       ParticleVector es = efs.particles();
       sort(es.begin(), es.end(), cmpParticleByEt);
@@ -82,14 +82,14 @@
       ParticleVector nus = nufs.particles();
       sort(nus.begin(), nus.end(), cmpParticleByEt);
       Particle leading_nu = nus[0];
-      
+   
       // Require that the neutrino has Et > 25 GeV
       const FourMomentum nu = leading_nu.momentum();
       if (nu.Et() < 25*GeV) {
         getLog() << Log::DEBUG << "Neutrino fails Et cut" << endl;
         vetoEvent;
       }
-      
+   
       // Get "raw" electron 4-momentum and add back in photons that could have radiated from the electron
       FourMomentum e = leading_e.momentum();
       const ParticleVector photons = applyProjection<FinalState>(event, "PhotonFS").particles();
@@ -101,13 +101,13 @@
           e += p.momentum();
         }
       }
-      
+   
       // Require that the electron has Et > 25 GeV
       if (e.Et() < 25*GeV) {
         getLog() << Log::DEBUG << "Electron fails Et cut" << endl;
         vetoEvent;
-      }      
-      
+      }
+   
       const double eta_e = fabs(e.pseudorapidity());
       const double et_e = e.Et();
       const int chg_e = PID::threeCharge(leading_e.pdgId());
@@ -133,13 +133,13 @@
         _h_dsigplus_deta_25->fill(eta_e, weight);
       }
     }
-    
-    
+ 
+ 
     /// Finalize
     void finalize() {
       // Construct asymmetry: (dsig+/deta - dsig-/deta) / (dsig+/deta + dsig-/deta) for each Et region
       AIDA::IHistogramFactory& hf = histogramFactory();
-      
+   
       IHistogram1D* num25_35 = hf.subtract("/num25_35", *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35);
       IHistogram1D* denom25_35 = hf.add("/denom25_35", *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35);
       assert(num25_35 && denom25_35);
@@ -169,7 +169,7 @@
       hf.destroy(_h_dsigplus_deta_25);
       hf.destroy(_h_dsigminus_deta_25);
     }
-    
+ 
     //@}
 
 
@@ -183,10 +183,10 @@
     //@}
 
   };
-    
-  
-  
+ 
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2008_S7837160> plugin_D0_2008_S7837160;
-  
+
 }

Modified: trunk/src/Analyses/D0_2008_S7863608.cc
==============================================================================
--- trunk/src/Analyses/D0_2008_S7863608.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2008_S7863608.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -22,18 +22,18 @@
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
-    
+ 
     //@}
 
 
     /// @name Analysis methods
-    //@{     
-    
+    //@{
+ 
     /// Book histograms
     void init() {
       ZFinder zfinder(-1.7, 1.7, 15.0*GeV, MUON, 65.0*GeV, 115.0*GeV, 0.2);
       addProjection(zfinder, "ZFinder");
-      
+   
       FastJets conefinder(zfinder.remainingFinalState(), FastJets::D0ILCONE, 0.5, 20.0*GeV);
       addProjection(conefinder, "ConeFinder");
 
@@ -41,15 +41,15 @@
       _h_jet_y_cross_section = bookHistogram1D(2, 1, 1);
       _h_Z_pT_cross_section = bookHistogram1D(3, 1, 1);
       _h_Z_y_cross_section = bookHistogram1D(4, 1, 1);
-      _h_total_cross_section = bookHistogram1D(5, 1, 1);  
+      _h_total_cross_section = bookHistogram1D(5, 1, 1);
     }
-    
-    
+ 
+ 
 
-    // Do the analysis 
+    // Do the analysis
     void analyze(const Event& e) {
       const double weight = e.weight();
-      
+   
       const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
       if (zfinder.particles().size()==1) {
         const JetAlg& jetpro = applyProjection<JetAlg>(e, "ConeFinder");
@@ -60,14 +60,14 @@
             jets_cut.push_back(j);
           }
         }
-        
+     
         // Return if there are no jets:
         if(jets_cut.size()<1) {
           getLog() << Log::DEBUG << "Skipping event " << e.genEvent().event_number()
                    << " because no jets pass cuts " << endl;
           vetoEvent;
         }
-        
+     
         // cut on Delta R between jet and muons
         foreach (const Jet& j, jets_cut) {
           foreach (const Particle& mu, zfinder.constituentsFinalState().particles()) {
@@ -77,23 +77,23 @@
             }
           }
         }
-        
+     
         const FourMomentum Zmom = zfinder.particles()[0].momentum();
-        
+     
         // In jet pT
         _h_jet_pT_cross_section->fill( jets_cut[0].momentum().pT(), weight);
         _h_jet_y_cross_section->fill( fabs(jets_cut[0].momentum().rapidity()), weight);
-        
+     
         // In Z pT
         _h_Z_pT_cross_section->fill(Zmom.pT(), weight);
         _h_Z_y_cross_section->fill(fabs(Zmom.rapidity()), weight);
-        
+     
         _h_total_cross_section->fill(1960.0, weight);
       }
     }
-    
-    
-    
+ 
+ 
+ 
     /// Finalize
     void finalize() {
       const double invlumi = crossSection()/sumOfWeights();
@@ -103,7 +103,7 @@
       scale(_h_Z_pT_cross_section, invlumi);
       scale(_h_Z_y_cross_section, invlumi);
     }
-    
+ 
     //@}
 
 
@@ -120,9 +120,9 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2008_S7863608> plugin_D0_2008_S7863608;
-  
+
 }

Modified: trunk/src/Analyses/D0_2009_S8202443.cc
==============================================================================
--- trunk/src/Analyses/D0_2009_S8202443.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2009_S8202443.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -20,13 +20,13 @@
         _sum_of_weights(0.0), _sum_of_weights_constrained(0.0)
     {
       setBeams(PROTON, ANTIPROTON);
-    } 
+    }
 
     //@}
 
 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     /// Book histograms
     void init() {
@@ -41,7 +41,7 @@
       FastJets conefinder_constrained(zfinder_constrained.remainingFinalState(),
                                       FastJets::D0ILCONE, 0.5, 20.0*GeV);
       addProjection(conefinder_constrained, "ConeFinderConstrained");
-      
+   
       // Unconstrained leptons
       ZFinder zfinder(FinalState(), ELECTRON, 65.0*GeV, 115.0*GeV, 0.2);
       addProjection(zfinder, "ZFinder");
@@ -55,13 +55,13 @@
       _h_jet2_pT = bookHistogram1D(4, 1, 1);
       _h_jet3_pT = bookHistogram1D(6, 1, 1);
     }
-    
-    
-    
-    // Do the analysis 
+ 
+ 
+ 
+    // Do the analysis
     void analyze(const Event& e) {
       double weight = e.weight();
-      
+   
       // unconstrained electrons first
       const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
       if (zfinder.particles().size()==1) {
@@ -74,7 +74,7 @@
             jets_cut.push_back(j);
           }
         }
-        
+     
         if (jets_cut.size()>0) {
           _h_jet1_pT->fill(jets_cut[0].momentum().pT()/GeV, weight);
         }
@@ -88,8 +88,8 @@
       else {
         getLog() << Log::DEBUG << "no unique lepton pair found." << endl;
       }
-      
-      
+   
+   
       // constrained electrons
       const ZFinder& zfinder_constrained = applyProjection<ZFinder>(e, "ZFinderConstrained");
       if (zfinder_constrained.particles().size()==1) {
@@ -102,7 +102,7 @@
             jets_cut.push_back(j);
           }
         }
-        
+     
         if (jets_cut.size()>0) {
           _h_jet1_pT_constrained->fill(jets_cut[0].momentum().pT()/GeV, weight);
         }
@@ -118,9 +118,9 @@
         vetoEvent;
       }
     }
-    
-    
-    
+ 
+ 
+ 
     // Finalize
     void finalize() {
       scale(_h_jet1_pT, 1.0/_sum_of_weights);
@@ -130,7 +130,7 @@
       scale(_h_jet2_pT_constrained, 1.0/_sum_of_weights_constrained);
       scale(_h_jet3_pT_constrained, 1.0/_sum_of_weights_constrained);
     }
-    
+ 
     //@}
 
 
@@ -145,14 +145,14 @@
     AIDA::IHistogram1D * _h_jet2_pT_constrained;
     AIDA::IHistogram1D * _h_jet3_pT_constrained;
     //@}
-    
+ 
     double _sum_of_weights, _sum_of_weights_constrained;
 
   };
 
-  
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2009_S8202443> plugin_D0_2009_S8202443;
-  
+
 }

Modified: trunk/src/Analyses/D0_2009_S8320160.cc
==============================================================================
--- trunk/src/Analyses/D0_2009_S8320160.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2009_S8320160.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -21,14 +21,14 @@
     D0_2009_S8320160() : Analysis("D0_2009_S8320160")
     {
       setBeams(PROTON, ANTIPROTON);
-    } 
-    
+    }
+ 
     //@}
 
 
     /// @name Analysis methods
-    //@{ 
-    
+    //@{
+ 
     // Book histograms
     void init() {
       FinalState fs;
@@ -46,30 +46,30 @@
       _h_chi_dijet.addHistogram(1000., 1100., bookHistogram1D(9, 1, 1));
       _h_chi_dijet.addHistogram(1100., 1960, bookHistogram1D(10, 1, 1));
     }
-    
-    
-    
-    /// Do the analysis 
+ 
+ 
+ 
+    /// Do the analysis
     void analyze(const Event & e) {
       const double weight = e.weight();
-      
-      const Jets& jets = applyProjection<JetAlg>(e, "ConeFinder").jetsByPt();      
+   
+      const Jets& jets = applyProjection<JetAlg>(e, "ConeFinder").jetsByPt();
       if (jets.size() < 2) vetoEvent;
-    
+ 
       FourMomentum j0(jets[0].momentum());
       FourMomentum j1(jets[1].momentum());
       double y0 = j0.rapidity();
       double y1 = j1.rapidity();
-      
+   
       if (fabs(y0+y1)>2) vetoEvent;
-      
+   
       double mjj = FourMomentum(j0+j1).mass();
       double chi = exp(fabs(y0-y1));
       _h_chi_dijet.fill(mjj, chi, weight);
     }
-    
-    
-    
+ 
+ 
+ 
     /// Finalize
     void finalize() {
       foreach (AIDA::IHistogram1D* hist, _h_chi_dijet.getHistograms()) {
@@ -78,20 +78,20 @@
     }
 
     //@}
-    
-    
+ 
+ 
   private:
-    
+ 
     /// @name Histograms
     //@{
     BinnedHistogram<double> _h_chi_dijet;
     //@}
-    
+ 
   };
 
 
-  
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2009_S8320160> plugin_D0_2009_S8320160;
-  
+
 }

Modified: trunk/src/Analyses/D0_2009_S8349509.cc
==============================================================================
--- trunk/src/Analyses/D0_2009_S8349509.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/D0_2009_S8349509.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,46 +16,46 @@
     //@{
 
     /// Constructor
-    D0_2009_S8349509() : Analysis("D0_2009_S8349509") 
+    D0_2009_S8349509() : Analysis("D0_2009_S8349509")
     {
       setBeams(PROTON, ANTIPROTON);
     }
-    
+ 
     //@}
 
 
     /// @name Analysis methods
     //@{
-    
+ 
     /// Book histograms
     void init() {
       ZFinder zfinder(-1.7, 1.7, 15.0*GeV, MUON, 65.0*GeV, 115.0*GeV, 0.2);
       addProjection(zfinder, "ZFinder");
-      
+   
       FastJets conefinder(zfinder.remainingFinalState(), FastJets::D0ILCONE, 0.5, 20.0*GeV);
       addProjection(conefinder, "ConeFinder");
-      
+   
       _h_dphi_jet_Z25 = bookHistogram1D(1, 1, 1);
       _h_dphi_jet_Z45 = bookHistogram1D(2, 1, 1);
-      
+   
       _h_dy_jet_Z25 = bookHistogram1D(3, 1, 1);
       _h_dy_jet_Z45 = bookHistogram1D(4, 1, 1);
-      
+   
       _h_yboost_jet_Z25 = bookHistogram1D(5, 1, 1);
       _h_yboost_jet_Z45 = bookHistogram1D(6, 1, 1);
-      
+   
       _inclusive_Z_sumofweights = 0.0;
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       const ZFinder& zfinder = applyProjection<ZFinder>(event, "ZFinder");
       if (zfinder.particles().size()==1) {
         // count inclusive sum of weights for histogram normalisation
         _inclusive_Z_sumofweights += weight;
-        
+     
         Jets jets;
         foreach (const Jet& j, applyProjection<JetAlg>(event, "ConeFinder").jetsByPt()) {
           if (fabs(j.momentum().pseudorapidity()) < 2.8) {
@@ -63,14 +63,14 @@
             break;
           }
         }
-        
+     
         // Return if there are no jets:
         if (jets.size() < 1) {
           getLog() << Log::DEBUG << "Skipping event " << event.genEvent().event_number()
                    << " because no jets pass cuts " << endl;
           vetoEvent;
         }
-        
+     
         // Cut on Delta R between jet and muons
         foreach (const Jet& j, jets) {
           foreach (const Particle& mu, zfinder.constituentsFinalState().particles()) {
@@ -79,7 +79,7 @@
             }
           }
         }
-        
+     
         const FourMomentum Zmom = zfinder.particles()[0].momentum();
         const FourMomentum jetmom = jets[0].momentum();
         double yZ = Zmom.rapidity();
@@ -87,7 +87,7 @@
         double dphi = deltaPhi(Zmom.phi(), jetmom.phi());
         double dy = fabs(yZ-yjet);
         double yboost = fabs(yZ+yjet)/2.0;
-        
+     
         if (Zmom.pT() > 25.0*GeV) {
           _h_dphi_jet_Z25->fill(dphi,weight);
           _h_dy_jet_Z25->fill(dy, weight);
@@ -99,10 +99,10 @@
           _h_yboost_jet_Z45->fill(yboost, weight);
         }
       }
-      
+   
     }
-    
-    
+ 
+ 
     void finalize() {
       if (_inclusive_Z_sumofweights == 0.0) return;
       scale(_h_dphi_jet_Z25, 1.0/_inclusive_Z_sumofweights);
@@ -112,7 +112,7 @@
       scale(_h_yboost_jet_Z25, 1.0/_inclusive_Z_sumofweights);
       scale(_h_yboost_jet_Z45, 1.0/_inclusive_Z_sumofweights);
     }
-    
+ 
     //@}
 
   private:
@@ -132,14 +132,14 @@
     AIDA::IHistogram1D *_h_yboost_jet_Z25;
     AIDA::IHistogram1D *_h_yboost_jet_Z45;
     //@}
-    
+ 
     double _inclusive_Z_sumofweights;
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<D0_2009_S8349509> plugin_D0_2009_S8349509;
-  
+
 }

Modified: trunk/src/Analyses/DELPHI_1995_S3137023.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_1995_S3137023.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/DELPHI_1995_S3137023.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,15 +16,15 @@
   public:
 
     /// Constructor
-    DELPHI_1995_S3137023() 
+    DELPHI_1995_S3137023()
       : Analysis("DELPHI_1995_S3137023")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
       _weightedTotalNumXiMinus = 0;
       _weightedTotalNumSigma1385Plus = 0;
     }
 
-    
+ 
     /// @name Analysis methods
     //@{
 
@@ -42,26 +42,26 @@
       // First, veto on leptonic events by requiring at least 4 charged FS particles
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
         vetoEvent;
       }
       getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
-      
+   
       // Get event weight for histo filling
       const double weight = e.weight();
-      
+   
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-      
+   
       // Final state of unstable particles to get particle spectra
       const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
-      
+   
       foreach (const Particle& p, ufs.particles()) {
         const int id = abs(p.pdgId());
         switch (id) {
@@ -75,36 +75,36 @@
           break;
         }
       }
-      
-    }
-        
    
+    }
+     
+
     /// Finalize
-    void finalize() { 
+    void finalize() {
       normalize(_histXpXiMinus       , _weightedTotalNumXiMinus/sumOfWeights());
       normalize(_histXpSigma1385Plus , _weightedTotalNumSigma1385Plus/sumOfWeights());
     }
-    
+ 
     //@}
 
 
   private:
-    
+ 
     /// Store the weighted sums of numbers of charged / charged+neutral
-    /// particles - used to calculate average number of particles for the 
+    /// particles - used to calculate average number of particles for the
     /// inclusive single particle distributions' normalisations.
     double _weightedTotalNumXiMinus;
     double _weightedTotalNumSigma1385Plus;
-    
+ 
     AIDA::IHistogram1D *_histXpXiMinus;
     AIDA::IHistogram1D *_histXpSigma1385Plus;
     //@}
-    
+ 
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<DELPHI_1995_S3137023> plugin_DELPHI_1995_S3137023;
-  
+
 }

Modified: trunk/src/Analyses/DELPHI_1996_S3430090.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_1996_S3430090.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/DELPHI_1996_S3430090.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -34,17 +34,17 @@
    */
   class DELPHI_1996_S3430090 : public Analysis {
   public:
-  
+
     /// Constructor
-    DELPHI_1996_S3430090() 
+    DELPHI_1996_S3430090()
       : Analysis("DELPHI_1996_S3430090")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
       _weightedTotalPartNum = 0;
       _passedCutWeightSum = 0;
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -65,31 +65,31 @@
       _histPtTOut = bookHistogram1D(2, 1, 1);
       _histPtSIn = bookHistogram1D(3, 1, 1);
       _histPtSOut = bookHistogram1D(4, 1, 1);
-      
+   
       _histRapidityT = bookHistogram1D(5, 1, 1);
       _histRapidityS = bookHistogram1D(6, 1, 1);
       _histScaledMom = bookHistogram1D(7, 1, 1);
       _histLogScaledMom = bookHistogram1D(8, 1, 1);
-      
+   
       _histPtTOutVsXp = bookProfile1D(9,  1, 1);
-      _histPtVsXp = bookProfile1D(10, 1, 1);    
-      
+      _histPtVsXp = bookProfile1D(10, 1, 1);
+   
       _hist1MinusT = bookHistogram1D(11, 1, 1);
       _histTMajor = bookHistogram1D(12, 1, 1);
       _histTMinor = bookHistogram1D(13, 1, 1);
       _histOblateness = bookHistogram1D(14, 1, 1);
-      
+   
       _histSphericity = bookHistogram1D(15, 1, 1);
       _histAplanarity = bookHistogram1D(16, 1, 1);
       _histPlanarity = bookHistogram1D(17, 1, 1);
-      
+   
       _histCParam = bookHistogram1D(18, 1, 1);
       _histDParam = bookHistogram1D(19, 1, 1);
-      
+   
       _histHemiMassH = bookHistogram1D(20, 1, 1);
       _histHemiMassL = bookHistogram1D(21, 1, 1);
       _histHemiMassD = bookHistogram1D(22, 1, 1);
-      
+   
       _histHemiBroadW = bookHistogram1D(23, 1, 1);
       _histHemiBroadN = bookHistogram1D(24, 1, 1);
       _histHemiBroadT = bookHistogram1D(25, 1, 1);
@@ -155,48 +155,48 @@
       const double weight = e.weight();
       _passedCutWeightSum += weight;
       _weightedTotalPartNum += numParticles * weight;
-      
+   
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-      
+   
       // Thrusts
       getLog() << Log::DEBUG << "Calculating thrust" << endl;
       const Thrust& thrust = applyProjection<Thrust>(e, "Thrust");
-      _hist1MinusT->fill(1 - thrust.thrust(), weight); 
-      _histTMajor->fill(thrust.thrustMajor(), weight); 
-      _histTMinor->fill(thrust.thrustMinor(), weight); 
+      _hist1MinusT->fill(1 - thrust.thrust(), weight);
+      _histTMajor->fill(thrust.thrustMajor(), weight);
+      _histTMinor->fill(thrust.thrustMinor(), weight);
       _histOblateness->fill(thrust.oblateness(), weight);
-      
+   
       // Jets
       const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
       if (durjet.clusterSeq()) {
-        _histDiffRate2Durham->fill(durjet.clusterSeq()->exclusive_ymerge(2), weight); 
-        _histDiffRate3Durham->fill(durjet.clusterSeq()->exclusive_ymerge(3), weight); 
-        _histDiffRate4Durham->fill(durjet.clusterSeq()->exclusive_ymerge(4), weight); 
+        _histDiffRate2Durham->fill(durjet.clusterSeq()->exclusive_ymerge(2), weight);
+        _histDiffRate3Durham->fill(durjet.clusterSeq()->exclusive_ymerge(3), weight);
+        _histDiffRate4Durham->fill(durjet.clusterSeq()->exclusive_ymerge(4), weight);
       }
       const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
       if (jadejet.clusterSeq()) {
-        _histDiffRate2Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(2), weight); 
-        _histDiffRate3Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(3), weight); 
-        _histDiffRate4Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(4), weight); 
+        _histDiffRate2Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(2), weight);
+        _histDiffRate3Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(3), weight);
+        _histDiffRate4Jade->fill(jadejet.clusterSeq()->exclusive_ymerge(4), weight);
       }
-      
+   
       // Sphericities
       getLog() << Log::DEBUG << "Calculating sphericity" << endl;
       const Sphericity& sphericity = applyProjection<Sphericity>(e, "Sphericity");
-      _histSphericity->fill(sphericity.sphericity(), weight); 
-      _histAplanarity->fill(sphericity.aplanarity(), weight); 
-      _histPlanarity->fill(sphericity.planarity(), weight); 
-      
+      _histSphericity->fill(sphericity.sphericity(), weight);
+      _histAplanarity->fill(sphericity.aplanarity(), weight);
+      _histPlanarity->fill(sphericity.planarity(), weight);
+   
       // C & D params
       getLog() << Log::DEBUG << "Calculating Parisi params" << endl;
       const ParisiTensor& parisi = applyProjection<ParisiTensor>(e, "Parisi");
       _histCParam->fill(parisi.C(), weight);
       _histDParam->fill(parisi.D(), weight);
-      
+   
       // Hemispheres
       getLog() << Log::DEBUG << "Calculating hemisphere variables" << endl;
       const Hemispheres& hemi = applyProjection<Hemispheres>(e, "Hemispheres");
@@ -207,7 +207,7 @@
       _histHemiBroadN->fill(hemi.Bmin(), weight);
       _histHemiBroadT->fill(hemi.Bsum(), weight);
       _histHemiBroadD->fill(hemi.Bdiff(), weight);
-      
+   
       // Iterate over all the charged final state particles.
       double Evis = 0.0;
       double Evis2 = 0.0;
@@ -217,14 +217,14 @@
         const Vector3 mom3 = p->momentum().vector3();
         const double energy = p->momentum().E();
         Evis += energy;
-        
+     
         // Scaled momenta.
         const double mom = mom3.mod();
         const double scaledMom = mom/meanBeamMom;
         const double logInvScaledMom = -std::log(scaledMom);
-        _histLogScaledMom->fill(logInvScaledMom, weight); 
-        _histScaledMom->fill(scaledMom, weight); 
-        
+        _histLogScaledMom->fill(logInvScaledMom, weight);
+        _histScaledMom->fill(scaledMom, weight);
+     
         // Get momenta components w.r.t. thrust and sphericity.
         const double momT = dot(thrust.thrustAxis(), mom3);
         const double momS = dot(sphericity.sphericityAxis(), mom3);
@@ -239,12 +239,12 @@
         _histPtSOut->fill(fabs(pToutS/GeV), weight);
         _histPtVsXp->fill(scaledMom, fabs(pT/GeV), weight);
         _histPtTOutVsXp->fill(scaledMom, fabs(pToutT/GeV), weight);
-        
+     
         // Calculate rapidities w.r.t. thrust and sphericity.
         const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
         const double rapidityS = 0.5 * std::log((energy + momS) / (energy - momS));
-        _histRapidityT->fill(rapidityT, weight); 
-        _histRapidityS->fill(rapidityS, weight); 
+        _histRapidityT->fill(rapidityT, weight);
+        _histRapidityS->fill(rapidityS, weight);
       }
       Evis2 = Evis*Evis;
 
@@ -264,13 +264,13 @@
           _histAEEC->fill(-cosij, -eec*weight);
         }
       }
-      
+   
       _histMultiCharged->fill(_histMultiCharged->binMean(0), numParticles*weight);
-      
-      
+   
+   
       // Final state of unstable particles to get particle spectra
       const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(e, "UFS");
-      
+   
       foreach (const Particle& p, ufs.particles()) {
         int id = abs(p.pdgId());
         switch (id) {
@@ -359,21 +359,21 @@
 
 
     // Finalize
-    void finalize() { 
-      // Normalize inclusive single particle distributions to the average number 
+    void finalize() {
+      // Normalize inclusive single particle distributions to the average number
       // of charged particles per event.
       const double avgNumParts = _weightedTotalPartNum / _passedCutWeightSum;
 
       normalize(_histPtTIn, avgNumParts);
-      normalize(_histPtTOut, avgNumParts); 
+      normalize(_histPtTOut, avgNumParts);
       normalize(_histPtSIn, avgNumParts);
-      normalize(_histPtSOut, avgNumParts); 
+      normalize(_histPtSOut, avgNumParts);
 
-      normalize(_histRapidityT, avgNumParts); 
-      normalize(_histRapidityS, avgNumParts); 
+      normalize(_histRapidityT, avgNumParts);
+      normalize(_histRapidityS, avgNumParts);
 
       normalize(_histLogScaledMom, avgNumParts);
-      normalize(_histScaledMom, avgNumParts); 
+      normalize(_histScaledMom, avgNumParts);
 
       scale(_histEEC, 1.0/_passedCutWeightSum);
       scale(_histAEEC, 1.0/_passedCutWeightSum);
@@ -409,33 +409,33 @@
       scale(_histMultiXi1530_0, 1.0/_passedCutWeightSum);
       scale(_histMultiLambdaB0, 1.0/_passedCutWeightSum);
 
-      normalize(_hist1MinusT); 
-      normalize(_histTMajor); 
-      normalize(_histTMinor); 
-      normalize(_histOblateness); 
-
-      normalize(_histSphericity); 
-      normalize(_histAplanarity); 
-      normalize(_histPlanarity); 
-
-      normalize(_histHemiMassD); 
-      normalize(_histHemiMassH); 
-      normalize(_histHemiMassL); 
-
-      normalize(_histHemiBroadW); 
-      normalize(_histHemiBroadN); 
-      normalize(_histHemiBroadT); 
-      normalize(_histHemiBroadD); 
+      normalize(_hist1MinusT);
+      normalize(_histTMajor);
+      normalize(_histTMinor);
+      normalize(_histOblateness);
+
+      normalize(_histSphericity);
+      normalize(_histAplanarity);
+      normalize(_histPlanarity);
+
+      normalize(_histHemiMassD);
+      normalize(_histHemiMassH);
+      normalize(_histHemiMassL);
+
+      normalize(_histHemiBroadW);
+      normalize(_histHemiBroadN);
+      normalize(_histHemiBroadT);
+      normalize(_histHemiBroadD);
 
-      normalize(_histCParam); 
-      normalize(_histDParam); 
+      normalize(_histCParam);
+      normalize(_histDParam);
 
-      normalize(_histDiffRate2Durham); 
-      normalize(_histDiffRate2Jade); 
+      normalize(_histDiffRate2Durham);
+      normalize(_histDiffRate2Jade);
       normalize(_histDiffRate3Durham);
-      normalize(_histDiffRate3Jade); 
+      normalize(_histDiffRate3Jade);
       normalize(_histDiffRate4Durham);
-      normalize(_histDiffRate4Jade); 
+      normalize(_histDiffRate4Jade);
     }
 
     //@}
@@ -444,7 +444,7 @@
   private:
 
     /// Store the weighted sums of numbers of charged / charged+neutral
-    /// particles - used to calculate average number of particles for the 
+    /// particles - used to calculate average number of particles for the
     /// inclusive single particle distributions' normalisations.
     double _weightedTotalPartNum;
 
@@ -464,10 +464,10 @@
 
     AIDA::IProfile1D   *_histPtTOutVsXp, *_histPtVsXp;
 
-    AIDA::IHistogram1D *_hist1MinusT; 
-    AIDA::IHistogram1D *_histTMajor; 
-    AIDA::IHistogram1D *_histTMinor; 
-    AIDA::IHistogram1D *_histOblateness; 
+    AIDA::IHistogram1D *_hist1MinusT;
+    AIDA::IHistogram1D *_histTMajor;
+    AIDA::IHistogram1D *_histTMinor;
+    AIDA::IHistogram1D *_histOblateness;
 
     AIDA::IHistogram1D *_histSphericity;
     AIDA::IHistogram1D *_histAplanarity;
@@ -479,14 +479,14 @@
     AIDA::IHistogram1D *_histHemiMassD;
     AIDA::IHistogram1D *_histHemiMassH;
     AIDA::IHistogram1D *_histHemiMassL;
-               
+            
     AIDA::IHistogram1D *_histHemiBroadW;
     AIDA::IHistogram1D *_histHemiBroadN;
     AIDA::IHistogram1D *_histHemiBroadT;
     AIDA::IHistogram1D *_histHemiBroadD;
 
     AIDA::IHistogram1D *_histDiffRate2Durham;
-    AIDA::IHistogram1D *_histDiffRate2Jade; 
+    AIDA::IHistogram1D *_histDiffRate2Jade;
     AIDA::IHistogram1D *_histDiffRate3Durham;
     AIDA::IHistogram1D *_histDiffRate3Jade;
     AIDA::IHistogram1D *_histDiffRate4Durham;

Modified: trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/DELPHI_2002_069_CONF_603.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -21,17 +21,17 @@
   public:
 
     /// Constructor
-    DELPHI_2002_069_CONF_603() 
+    DELPHI_2002_069_CONF_603()
       : Analysis("DELPHI_2002_069_CONF_603")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
     }
 
 
     /// @name Analysis methods
     //@{
 
-    /// Book projections and histograms      
+    /// Book projections and histograms
     void init() {
       addProjection(Beam(), "Beams");
       addProjection(ChargedFinalState(), "FS");
@@ -48,30 +48,30 @@
       // First, veto on leptonic events by requiring at least 4 charged FS particles
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         getLog() << Log::DEBUG << "Failed ncharged cut" << endl;
         vetoEvent;
       }
       getLog() << Log::DEBUG << "Passed ncharged cut" << endl;
-      
+   
       // Get event weight for histo filling
       const double weight = e.weight();
-      
+   
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-      
-      
+   
+   
       foreach (const GenParticle* p, particles(e.genEvent())) {
         const GenVertex* pv = p->production_vertex();
         const GenVertex* dv = p->end_vertex();
         if (IS_BHADRON_PDGID(p->pdg_id())) {
           const double xp = p->momentum().e()/meanBeamMom;
-          
+       
           // If the B-hadron has a parton as parent, call it primary B-hadron:
           if (pv) {
             bool is_primary = false;
@@ -83,7 +83,7 @@
               _histMeanXbprim->fill(_histMeanXbprim->binMean(0), xp, weight);
             }
           }
-          
+       
           // If the B-hadron has no B-hadron as a child, it decayed weakly:
           if (dv) {
             bool is_weak = true;
@@ -98,11 +98,11 @@
               _histMeanXbweak->fill(_histMeanXbweak->binMean(0), xp, weight);
             }
           }
-          
+       
         }
       }
     }
-    
+ 
 
     // Finalize
     void finalize() {
@@ -114,7 +114,7 @@
   private:
 
     /// Store the weighted sums of numbers of charged / charged+neutral
-    /// particles - used to calculate average number of particles for the 
+    /// particles - used to calculate average number of particles for the
     /// inclusive single particle distributions' normalisations.
 
     AIDA::IHistogram1D *_histXbprim;

Modified: trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc
==============================================================================
--- trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/DELPHI_2003_WUD_03_11.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -33,13 +33,13 @@
       _numdurjets = 0;
       _numjadejets = 0;
     }
-    
+ 
 
 
     /// @name Jet angle calculator functions
     /// @todo These shouldn't be object methods, as they have no state!
     //@{
-    
+ 
     /// @todo Use Jet or FourMomentum interface rather than PseudoJet
     /// @todo Move to utils?
     double calc_BZ(const vector<fastjet::PseudoJet>& jets) {
@@ -51,7 +51,7 @@
 
 
     /// @todo Use Jet or FourMomentum interface rather than PseudoJet
-    /// @todo Move to utils? 
+    /// @todo Move to utils?
     double calc_KSW(const vector<fastjet::PseudoJet>& jets) {
       assert(jets.size() == 4);
       Vector3 p13 = cross( momentum3(jets[0]), momentum3(jets[2]));
@@ -61,10 +61,10 @@
       return cos (0.5*( acos (dot(p14,p23) / (p14.mod()*p23.mod())) +
                         acos (dot(p13,p24) / (p13.mod()*p24.mod())) ));
     }
-    
+ 
 
     /// @todo Use Jet or FourMomentum interface rather than PseudoJet
-    /// @todo Move to utils? 
+    /// @todo Move to utils?
     double calc_NR(const vector<fastjet::PseudoJet>& jets) {
       assert(jets.size() == 4);
       Vector3 p12 = momentum3(jets[0]) - momentum3(jets[1]);
@@ -73,7 +73,7 @@
     }
 
     /// @todo Use Jet or FourMomentum interface rather than PseudoJet
-    /// @todo Move to utils? 
+    /// @todo Move to utils?
     double calc_ALPHA34(const vector<fastjet::PseudoJet>& jets) {
       assert(jets.size() == 4);
       Vector3 p3 = momentum3(jets[2]);
@@ -109,17 +109,17 @@
       // First, veto on leptonic events by requiring at least 4 charged FS particles
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
         vetoEvent;
       }
       getLog() << Log::DEBUG << "Passed multiplicity cut" << endl;
-      
+   
       // Get event weight for histo filling
       const double weight = e.weight();
-      
+   
       // Jets
       const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
       vector<fastjet::PseudoJet> jets_durham;
@@ -131,12 +131,12 @@
           _histDurhamNR->fill(fabs(calc_NR(jets_durham)), weight);
           _histDurhamALPHA34->fill(calc_ALPHA34(jets_durham), weight);
         }
-        if (durjet.clusterSeq()->exclusive_ymerge(3) > 0.008 && 
+        if (durjet.clusterSeq()->exclusive_ymerge(3) > 0.008 &&
             durjet.clusterSeq()->exclusive_ymerge(4) < 0.008) {
           _numdurjets++;
         }
       }
-      
+   
       const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
       vector<fastjet::PseudoJet> jets_jade;
       if (jadejet.clusterSeq()) {
@@ -147,20 +147,20 @@
           _histJadeNR->fill(fabs(calc_NR(jets_jade)), weight);
           _histJadeALPHA34->fill(calc_ALPHA34(jets_jade), weight);
         }
-        if (jadejet.clusterSeq()->exclusive_ymerge(3) > 0.015 && 
+        if (jadejet.clusterSeq()->exclusive_ymerge(3) > 0.015 &&
             jadejet.clusterSeq()->exclusive_ymerge(4) < 0.015) {
           _numjadejets++;
         }
       }
-      
-    }    
-    
-    
+   
+    }
+ 
+ 
     // Finalize
-    void finalize() { 
-      // Normalize inclusive single particle distributions to the average number 
+    void finalize() {
+      // Normalize inclusive single particle distributions to the average number
       // of charged particles per event.
-      
+   
       getLog() << Log::INFO << "Number of Durham jets = " << _numdurjets << endl;
       getLog() << Log::INFO << "Number of Jade jets   = " << _numjadejets << endl;
       normalize(_histDurhamBZ      , 0.0785);

Modified: trunk/src/Analyses/E735_1998_S3905616.cc
==============================================================================
--- trunk/src/Analyses/E735_1998_S3905616.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/E735_1998_S3905616.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,16 +9,16 @@
 
   class E735_1998_S3905616 : public Analysis {
   public:
-    
+ 
     /// Constructor
     E735_1998_S3905616() : Analysis("E735_1998_S3905616") {
       setBeams(PROTON, ANTIPROTON);
     }
-    
+ 
 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       const ChargedFinalState cfs;
       addProjection(cfs, "FS");
@@ -30,19 +30,19 @@
     void analyze(const Event& event) {
       const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Get the event weight
       const double weight = event.weight();
-      
+   
       // Fill histo of charged multiplicity distribution
       _hist_multiplicity->fill(numParticles, weight);
     }
-    
-    
+ 
+ 
     void finalize() {
       normalize(_hist_multiplicity);
     }
-    
+ 
     //@}
 
 
@@ -52,12 +52,12 @@
     //@{
     AIDA::IHistogram1D *_hist_multiplicity;
     //@}
-    
+ 
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<E735_1998_S3905616> plugin_E735_1998_S3905616;
-  
+
 }

Modified: trunk/src/Analyses/ExampleAnalysis.cc
==============================================================================
--- trunk/src/Analyses/ExampleAnalysis.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/ExampleAnalysis.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,23 +10,23 @@
 #include "Rivet/Projections/Sphericity.hh"
 
 namespace Rivet {
-  
-  
+
+
   /// @brief Just measures a few random things as an example.
   class ExampleAnalysis : public Analysis {
   public:
-    
+ 
     /// Constructor
     ExampleAnalysis()
       : Analysis("EXAMPLE")
-    { 
+    {
       // No counters etc. to initialise, hence nothing to do here!
     }
-    
+ 
 
     /// @name Analysis methods
     //@{
-    
+ 
     /// Set up projections and book histograms
     void init() {
       // Projections
@@ -65,21 +65,21 @@
       getLog() << Log::DEBUG << "Total charged multiplicity = " << cm.totalMultiplicity()   << endl;
       getLog() << Log::DEBUG << "Hadron multiplicity = " << cnm.hadronMultiplicity() << endl;
       getLog() << Log::DEBUG << "Hadron charged multiplicity = " << cm.hadronMultiplicity()  << endl;
-      
+   
       const Thrust& t = applyProjection<Thrust>(event, "Thrust");
       getLog() << Log::DEBUG << "Thrust = " << t.thrust() << endl;
-      
+   
       const Sphericity& s = applyProjection<Sphericity>(event, "Sphericity");
       getLog() << Log::DEBUG << "Sphericity = " << s.sphericity() << endl;
       getLog() << Log::DEBUG << "Aplanarity = " << s.aplanarity() << endl;
-      
+   
       size_t num_b_jets = 0;
       const Jets jets = applyProjection<FastJets>(event, "Jets").jets();
       foreach (const Jet& j, jets) {
         if (j.containsBottom()) ++num_b_jets;
       }
       getLog() << Log::DEBUG << "#B-jets = " << num_b_jets << endl;
-      
+   
       // Fill histograms
       const double weight = event.weight();
       _histTot->fill(cnm.totalMultiplicity(), weight);
@@ -91,10 +91,10 @@
       _histSphericity->fill(s.sphericity(), weight);
       _histAplanarity->fill(s.aplanarity(), weight);
     }
-    
-    
+ 
+ 
     /// Finalize
-    void finalize() { 
+    void finalize() {
       normalize(_histTot);
       normalize(_histChTot);
       normalize(_histHadrTot);
@@ -109,7 +109,7 @@
 
 
   private:
-    
+ 
     //@{
     /// Histograms
     AIDA::IHistogram1D* _histTot;
@@ -123,7 +123,7 @@
     //@}
 
   };
-    
+ 
 
 
   // This global object acts as a hook for the plugin system

Modified: trunk/src/Analyses/H1_1994_S2919893.cc
==============================================================================
--- trunk/src/Analyses/H1_1994_S2919893.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/H1_1994_S2919893.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,5 +1,5 @@
 // -*- C++ -*-
-#include "Rivet/Analysis.hh" 
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
 #include "Rivet/Math/Constants.hh"
 #include "Rivet/Tools/ParticleIdUtils.hh"
@@ -27,17 +27,17 @@
       _w117 = make_pair(0.0, 0.0);
       _wEnergy = make_pair(0.0, 0.0);
     }
-    
+ 
 
 
     /// @name Analysis methods
     //@{
-    
+ 
     void analyze(const Event& event) {
       const FinalState& fs = applyProjection<FinalState>(event, "FS");
       const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
       const DISLepton& dl = applyProjection<DISLepton>(event,"Lepton");
-      
+   
       // Get the DIS kinematics
       double x  = dk.x();
       double w2 = dk.W2();
@@ -48,17 +48,17 @@
       double ptel = pT(leptonMom);
       double enel = leptonMom.E();
       double thel = leptonMom.angle(dk.beamHadron().momentum())/degree;
-      
+   
       // Extract the particles other than the lepton
       ParticleVector particles;
       particles.reserve(fs.particles().size());
       const GenParticle& dislepGP = dl.out().genParticle();
       foreach (const Particle& p, fs.particles()) {
-        const GenParticle& loopGP = p.genParticle(); 
+        const GenParticle& loopGP = p.genParticle();
         if (&loopGP == &dislepGP) continue;
         particles.push_back(p);
       }
-      
+   
       // Cut on the forward energy
       double efwd = 0.0;
       foreach (const Particle& p, particles) {
@@ -67,13 +67,13 @@
           efwd += p.momentum().E();
         }
       }
-      
+   
       // Apply the cuts
       // Lepton energy and angle, w2 and forward energy
       getLog()<<Log::DEBUG<<"enel/GeV = "<<enel/GeV<<", thel = "<<thel<<", w2 = "<<w2<<", efwd/GeV = "<<efwd/GeV<<std::endl;
       bool cut = enel/GeV > 14. && thel > 157. && thel < 172.5 && w2 >= 3000. && efwd/GeV > 0.5;
       if (!cut) vetoEvent;
-      
+   
       // Weight of the event
       const double weight = event.weight();
       // weights for x<1e-3 and x>1e-3
@@ -82,23 +82,23 @@
       } else {
         _wEnergy.second += weight;
       }
-      
+   
       // Boost to hadronic CM
       const LorentzTransform hcmboost = dk.boostHCM();
       // Loop over the particles
       long ncharged(0);
       for (size_t ip1 = 0; ip1 < particles.size(); ++ip1) {
         const Particle& p = particles[ip1];
-        
+     
         double th = p.momentum().angle(dk.beamHadron().momentum()) / degree;
         // Boost momentum to lab
         const FourMomentum hcmMom = hcmboost.transform(p.momentum());
         // Angular cut
         if (th <= 4.4) continue;
-        
+     
         // Energy flow histogram
         double et = fabs(Et(hcmMom));
-        double eta = -hcmMom.pseudorapidity(); 
+        double eta = -hcmMom.pseudorapidity();
         if (x < 1e-3) {
           _histEnergyFlowLowX ->fill(eta, et*weight);
         } else {
@@ -110,7 +110,7 @@
             double xf= -2 * hcmMom.z() / w;
             double pt2 = pT2(hcmMom);
             if (w > 50. && w <= 100.) {
-              _histSpectraW77 ->fill(xf, weight); 
+              _histSpectraW77 ->fill(xf, weight);
             } else if (w > 100. && w <= 150.) {
               _histSpectraW122->fill(xf, weight);
             } else if (w > 150. && w <= 200.) {
@@ -139,7 +139,7 @@
 
           /// @todo Use angle function
           double deltaphi = phi1 - phi2;
-          if (fabs(deltaphi) > PI) 
+          if (fabs(deltaphi) > PI)
             deltaphi = fabs(fabs(deltaphi) - TWOPI);
           double eta2 = p2.momentum().pseudorapidity();
           double omega = sqrt(sqr(eta1-eta2) + sqr(deltaphi));
@@ -195,8 +195,8 @@
 
 
     /// Finalize
-    void finalize() { 
-      // Normalize inclusive single particle distributions to the average number 
+    void finalize() {
+      // Normalize inclusive single particle distributions to the average number
       // of charged particles per event.
       double avgNumParts = _w77.first/_w77.second;
       normalize(_histSpectraW77, avgNumParts);
@@ -214,7 +214,7 @@
       scale(_histEnergyFlowHighX, 1./_wEnergy.second);
 
       scale(_histEECLowX , 1./_wEnergy.first );
-      scale(_histEECHighX, 1./_wEnergy.second); 
+      scale(_histEECHighX, 1./_wEnergy.second);
     }
 
 

Modified: trunk/src/Analyses/H1_1995_S3167097.cc
==============================================================================
--- trunk/src/Analyses/H1_1995_S3167097.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/H1_1995_S3167097.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,11 +15,11 @@
 
     /// Constructor
     H1_1995_S3167097() : Analysis("H1_1995_S3167097")
-    { 
+    {
       setBeams(ELECTRON, PROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -42,9 +42,9 @@
       _hAvQ2 = bookHistogram1D("23tmp", _nbin, 1.0, 10.0);
       _hN    = bookHistogram1D("24", _nbin, 1.0, 10.0);
     }
-    
-    
-    /// Calculate the bin number from the DISKinematics projection  
+ 
+ 
+    /// Calculate the bin number from the DISKinematics projection
     int _getbin(const DISKinematics& dk) {
       if ( dk.Q2() > 5.0*GeV2 && dk.Q2() <= 10.0*GeV2 ) {
         if ( dk.x() > 0.0001 && dk.x() <= 0.0002 )
@@ -72,62 +72,62 @@
       }
       return -1;
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const FinalStateHCM& fs = applyProjection<FinalStateHCM>(event, "FS");
       const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
       const CentralEtHCM y1 = applyProjection<CentralEtHCM>(event, "Y1HCM");
-      
+   
       const int ibin = _getbin(dk);
       if (ibin < 0) vetoEvent;
       const double weight = event.weight();
-      
+   
       for (size_t i = 0, N = fs.particles().size(); i < N; ++i) {
         const double rap = fs.particles()[i].momentum().rapidity();
         const double et = fs.particles()[i].momentum().Et();
         _hEtFlow[ibin]->fill(rap, weight * et/GeV);
         _hEtFlowStat[ibin]->fill(rap, weight * et/GeV);
       }
-      
+   
       _nev[ibin] += weight;
       _hAvEt->fill(ibin + 1.5, weight * y1.sumEt()/GeV);
       _hAvX->fill(ibin + 1.5, weight * dk.x());
       _hAvQ2->fill(ibin + 1.5, weight * dk.Q2()/GeV2);
       _hN->fill(ibin + 1.5, weight);
     }
-    
-    
+ 
+ 
     void finalize() {
       for (size_t ibin = 0; ibin < _nbin; ++ibin) {
         _hEtFlow[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
         _hEtFlowStat[ibin]->scale(1.0/(_nev[ibin]*double(_nb)/(_xmax-_xmin)));
       }
-      
+   
       /// @todo Automate this sort of thing so that the analysis code is more readable.
       AIDA::IDataPointSet* h = 0;
       h = histogramFactory().divide("/H1_1995_S3167097/21", *_hAvEt, *_hN);
       h->setTitle(_hAvEt->title());
       histogramFactory().destroy(_hAvEt);
-      
+   
       h = histogramFactory().divide("/H1_1995_S3167097/22", *_hAvX, *_hN);
       h->setTitle(_hAvX->title());
       histogramFactory().destroy(_hAvX);
-      
+   
       h = histogramFactory().divide("/H1_1995_S3167097/23", *_hAvQ2, *_hN);
       h->setTitle(_hAvQ2->title());
       histogramFactory().destroy(_hAvQ2);
     }
-    
+ 
     //@}
 
-    
+ 
   private:
 
     /// Some integer constants used.
     /// @todo Remove statics!
     static const size_t _nb = 24, _nbin = 9;
-    
+ 
     /// Some double constants used.
     /// @todo Remove statics!
     static const double _xmin, _xmax;

Modified: trunk/src/Analyses/H1_2000_S4129130.cc
==============================================================================
--- trunk/src/Analyses/H1_2000_S4129130.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/H1_2000_S4129130.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,5 +1,5 @@
 // -*- C++ -*-
-#include "Rivet/Analysis.hh" 
+#include "Rivet/Analysis.hh"
 #include "Rivet/RivetAIDA.hh"
 #include "Rivet/Math/Constants.hh"
 #include "Rivet/Tools/ParticleIdUtils.hh"
@@ -20,8 +20,8 @@
     {
       setBeams(ELECTRON, PROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -36,7 +36,7 @@
       double x   = dk.x();
       double y   = dk.y();
       double w2  = dk.W2();
-      
+   
       // Momentum of the scattered lepton
       FourMomentum leptonMom = dl.out().momentum();
       // pT energy and angle
@@ -49,11 +49,11 @@
       const GenParticle& dislepGP = dl.out().genParticle();
       for (ParticleVector::const_iterator p = fs.particles().begin();
            p != fs.particles().end(); ++p) {
-        const GenParticle& loopGP = p->genParticle(); 
+        const GenParticle& loopGP = p->genParticle();
         if (&loopGP == &dislepGP) continue;
         particles.push_back(*p);
       }
-      
+   
       // Cut on the forward energy
       double efwd = 0.;
       foreach (const Particle& p, particles) {
@@ -65,25 +65,25 @@
       bool evcut[4];
       // Low  Q2 selection a
       /// @todo Units and inRange
-      evcut[0] = enel/GeV > 12. && w2 >= 4400. && efwd/GeV > 0.5 && 
+      evcut[0] = enel/GeV > 12. && w2 >= 4400. && efwd/GeV > 0.5 &&
         thel > 157. && thel < 176.0;
       // Low  Q2 selection b
       /// @todo Units and inRange
       evcut[1] = enel/GeV > 12. && y > 0.3 && y < 0.5;
       // High Q2 selection a
       /// @todo Units and inRange
-      evcut[2] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 && 
+      evcut[2] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
         w2 >= 4400. && efwd > 0.5;
       // High Q2 selection b
       /// @todo Units and inRange
       evcut[3] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
         w2 > 27110. && w2 < 45182.;
-      
+   
       // Veto if fails all cuts
       if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) {
         vetoEvent;
       }
-      
+   
       // Find the bins
       int bin[4] = {-1,-1,-1,-1};
       // For the low Q2 selection a)
@@ -152,20 +152,20 @@
       else if (q2 > 220. && q2 <= 400.) bin[3] = 1;
       else if (q2 > 400.              ) bin[3] = 2;
       evcut[3] &= bin[3] >= 0;
-      
+   
       // Veto if fails all cuts after bin selection
       if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]));
-      
+   
       // Increment the count for normalisation
       const double weight = event.weight();
       if (evcut[0]) _weightETLowQa [bin[0]] += weight;
       if (evcut[1]) _weightETLowQb [bin[1]] += weight;
       if (evcut[2]) _weightETHighQa[bin[2]] += weight;
       if (evcut[3]) _weightETHighQb[bin[3]] += weight;
-      
+   
       // Boost to hadronicCM
       const LorentzTransform hcmboost = dk.boostHCM();
-      
+   
       // Loop over the particles
       double etcent = 0;
       double etfrag = 0;
@@ -189,17 +189,17 @@
         _histAverETFrag   ->fill(q2, etfrag*weight,weight);
       }
     }
-    
-    
+ 
+ 
     void init() {
       // Projections
       addProjection(DISLepton(), "Lepton");
       addProjection(DISKinematics(), "Kinematics");
       addProjection(FinalState(), "FS");
-      
+   
       // Histos
       IHistogram1D* h = 0;
-      
+   
       // Histograms and weight vectors for low Q^2 a
       _histETLowQa.reserve(17);
       _weightETLowQa.reserve(17);
@@ -208,7 +208,7 @@
         _histETLowQa.push_back(h);
         _weightETLowQa.push_back(0.);
       }
-      
+   
       // Histograms and weight vectors for high Q^2 a
       _histETHighQa.reserve(7);
       _weightETHighQa.reserve(7);
@@ -217,7 +217,7 @@
         _histETHighQa.push_back(h);
         _weightETHighQa.push_back(0.);
       }
-      
+   
       // Histograms and weight vectors for low Q^2 b
       _histETLowQb.reserve(5);
       _weightETLowQb.reserve(5);
@@ -226,7 +226,7 @@
         _histETLowQb.push_back(h);
         _weightETLowQb.push_back(0.);
       }
-      
+   
       // Histograms and weight vectors for high Q^2 b
       _histETHighQb.reserve(3);
       _weightETHighQb.reserve(3);
@@ -235,15 +235,15 @@
         _histETHighQb.push_back(h);
         _weightETHighQb.push_back(0.0);
       }
-      
+   
       // Histograms for the averages
       _histAverETCentral = bookProfile1D(33,  1, 1);
       _histAverETFrag = bookProfile1D(34,  1, 1);
     }
-    
-    
+ 
+ 
     // Finalize
-    void finalize() { 
+    void finalize() {
       // Normalization of the Et distributions
       for (size_t ix=0; ix<17; ++ix) {
         scale(_histETLowQa[ix], 1./_weightETLowQa[ix]);
@@ -258,13 +258,13 @@
         scale(_histETHighQb[ix], 1./_weightETHighQb[ix]);
       }
     }
-    
+ 
 
     //@}
 
 
   private:
-    
+ 
     /// Polar angle with right direction of the beam
     inline double beamAngle(const FourVector& v, const bool & order) {
       double thel = v.polarAngle()/degree;

Modified: trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc
==============================================================================
--- trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/JADE_OPAL_2000_S4300807.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -28,12 +28,12 @@
     JADE_OPAL_2000_S4300807() : Analysis("JADE_OPAL_2000_S4300807"),
         _initialised(false)
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
     }
-    
+ 
     //@}
 
-    
+ 
     /// @name Analysis methods
     //@{
 
@@ -48,7 +48,7 @@
 
 
     void analyze(const Event& e) {
-      
+   
       // Which CMS energy are we running at?
       if (!_initialised) {
         const double sqrts = applyProjection<Beam>(e, "Beams").sqrtS()/GeV;
@@ -63,7 +63,7 @@
         case 183: offset = 13; break;
         case 189: offset = 14; break;
         default:
-          getLog() << Log::ERROR 
+          getLog() << Log::ERROR
               << "CMS energy of events sqrt(s) = " << sqrts
               <<" doesn't match any available analysis energy." << endl;
           /// @todo Really call exit()? I don't like the break of "command chain" that this implies
@@ -76,19 +76,19 @@
         }
         _initialised = true;
       }
-        
-      
+     
+   
       // Jets
       getLog() << Log::DEBUG << "Using FastJet JADE patch to make diff jet rate plots:" << endl;
       const double weight = e.weight();
-      
+   
       const FastJets& jadejet = applyProjection<FastJets>(e, "JadeJets");
       if (jadejet.clusterSeq()) {
         double y_23 = jadejet.clusterSeq()->exclusive_ymerge(2);
         double y_34 = jadejet.clusterSeq()->exclusive_ymerge(3);
         double y_45 = jadejet.clusterSeq()->exclusive_ymerge(4);
         double y_56 = jadejet.clusterSeq()->exclusive_ymerge(5);
-        
+     
         for (int i = 0; i < _h_R_Jade[0]->size(); ++i) {
           IDataPoint* dp = _h_R_Jade[0]->point(i);
           if (y_23 < dp->coordinate(0)->value()) {
@@ -124,19 +124,19 @@
           }
         }
       }
-      
+   
       const FastJets& durjet = applyProjection<FastJets>(e, "DurhamJets");
       if (durjet.clusterSeq()) {
         double y_23 = durjet.clusterSeq()->exclusive_ymerge(2);
         double y_34 = durjet.clusterSeq()->exclusive_ymerge(3);
         double y_45 = durjet.clusterSeq()->exclusive_ymerge(4);
         double y_56 = durjet.clusterSeq()->exclusive_ymerge(5);
-        
+     
         _h_y_Durham[0]->fill(y_23, weight);
         _h_y_Durham[1]->fill(y_34, weight);
         _h_y_Durham[2]->fill(y_45, weight);
         _h_y_Durham[3]->fill(y_56, weight);
-        
+     
         for (int i = 0; i < _h_R_Durham[0]->size(); ++i) {
           IDataPoint* dp = _h_R_Durham[0]->point(i);
           if (y_23 < dp->coordinate(0)->value()) {
@@ -181,7 +181,7 @@
       for (size_t n = 0; n < 4; ++n) {
         scale(_h_y_Durham[n], 1.0/sumOfWeights());
       }
-      
+   
       for (size_t n = 0; n < 5; ++n) {
         /// scale integrated jet rates to 100%
         for (int i = 0; i < _h_R_Jade[n]->size(); ++i) {
@@ -194,10 +194,10 @@
         }
       }
     }
-    
+ 
     //@}
-    
-    
+ 
+ 
   private:
 
     /// @name Histograms

Modified: trunk/src/Analyses/MC_JetAnalysis.cc
==============================================================================
--- trunk/src/Analyses/MC_JetAnalysis.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_JetAnalysis.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -20,33 +20,33 @@
 
   // Book histograms
   void MC_JetAnalysis::init() {
-    
+ 
     for (size_t i=0; i<m_njet; ++i) {
       stringstream dname;
       dname<<"log10_d_"<<i<<i+1;
       _h_log10_d[i] = bookHistogram1D(dname.str(), 50, 0.2, 2.6);
-      
+   
       stringstream Rname;
       Rname<<"log10_R_"<<i;
       _h_log10_R[i] = bookDataPointSet(Rname.str(), 50, 0.2, 2.6);
-      
+   
       stringstream pTname;
       pTname<<"jet_pT_"<<i+1;
       double pTmax = 1.0/(double(i)+2.0)*m_sqrts/2.0;
       int nbins = 100/(i+1);
       _h_pT_jet[i] = bookHistogram1D(pTname.str(), nbins, 0.0, pTmax);
-      
+   
       stringstream etaname;
       etaname<<"jet_eta_"<<i+1;
       _h_eta_jet[i] = bookHistogram1D(etaname.str(), 50, -5.0, 5.0);
-      
+   
       for (size_t j=i+1; j<m_njet; ++j) {
         std::pair<size_t, size_t> ij(std::make_pair(i, j));
-        
+     
         stringstream detaname;
         detaname<<"jets_deta_"<<i+1<<j+1;
         _h_deta_jets.insert(make_pair(ij, bookHistogram1D(detaname.str(), 50, -5.0, 5.0)));
-        
+     
         stringstream dRname;
         dRname<<"jets_dR_"<<i+1<<j+1;
         _h_dR_jets.insert(make_pair(ij, bookHistogram1D(dRname.str(), 25, 0.0, 5.0)));
@@ -55,7 +55,7 @@
     stringstream Rname;
     Rname<<"log10_R_"<<m_njet;
     _h_log10_R[m_njet] = bookDataPointSet(Rname.str(), 50, 0.2, 2.6);
-    
+ 
     _h_jet_multi_exclusive = bookHistogram1D("jet_multi_exclusive", m_njet+3, -0.5, m_njet+3-0.5);
     _h_jet_multi_inclusive = bookHistogram1D("jet_multi_inclusive", m_njet+3, -0.5, m_njet+3-0.5);
     _h_jet_multi_ratio = bookDataPointSet("jet_multi_ratio", m_njet+2, 0.5, m_njet+3-0.5);
@@ -63,10 +63,10 @@
 
 
 
-  // Do the analysis 
+  // Do the analysis
   void MC_JetAnalysis::analyze(const Event & e) {
     double weight = e.weight();
-    
+ 
     const FastJets& jetpro = applyProjection<FastJets>(e, m_jetpro_name);
 
     // jet resolutions and integrated jet rates
@@ -76,10 +76,10 @@
       for (size_t i=0; i<m_njet; ++i) {
         // jet resolution i -> j
         double d_ij=log10(sqrt(seq->exclusive_dmerge_max(i)));
-        
+     
         // fill differential jet resolution
         _h_log10_d[i]->fill(d_ij, weight);
-        
+     
         // fill integrated jet resolution
         for (int ibin=0; ibin<_h_log10_R[i]->size(); ++ibin) {
           IDataPoint* dp=_h_log10_R[i]->point(ibin);
@@ -101,13 +101,13 @@
     }
 
     const Jets& jets = jetpro.jetsByPt(20.0);
-    
+ 
     // the remaining direct jet observables
     for (size_t i=0; i<m_njet; ++i) {
       if (jets.size()<i+1) continue;
       _h_pT_jet[i]->fill(jets[i].momentum().pT(), weight);
       _h_eta_jet[i]->fill(jets[i].momentum().eta(), weight);
-      
+   
       for (size_t j=i+1; j<m_njet; ++j) {
         if (jets.size()<j+1) continue;
         std::pair<size_t, size_t> ij(std::make_pair(i, j));
@@ -135,10 +135,10 @@
         IDataPoint* dp=_h_log10_R[i]->point(ibin);
         dp->coordinate(1)->setValue(dp->coordinate(1)->value()*crossSection()/sumOfWeights());
       }
-      
+   
       scale(_h_pT_jet[i], crossSection()/sumOfWeights());
       scale(_h_eta_jet[i], crossSection()/sumOfWeights());
-      
+   
       for (size_t j=i+1; j<m_njet; ++j) {
       }
     }

Modified: trunk/src/Analyses/MC_LHC_DIJET.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_DIJET.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_DIJET.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,15 +12,15 @@
   public:
 
     /// Default constructor
-    MC_LHC_DIJET() 
+    MC_LHC_DIJET()
       : Analysis("MC_LHC_DIJET") {
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
-    void init() { 
+    void init() {
       FinalState fs(-4, 4, 0.5*GeV);
       ChargedFinalState cfs(fs);
       addProjection(fs, "FS");
@@ -38,18 +38,18 @@
       _hist_jetdphi = bookHistogram1D("d08-x01-y01", 24, 0., 6.4);
       _hist_jetdeta = bookHistogram1D("d09-x01-y01", 24, 0., 6.);
       _hist_chargemultiplicity = bookHistogram1D("d10-x01-y01",30, 0.5, 250.5);
-      _hist_chargemeanpt = bookHistogram1D("d11-x01-y01", 25, 0., 10.);    
+      _hist_chargemeanpt = bookHistogram1D("d11-x01-y01", 25, 0., 10.);
       _hist_chargept = bookHistogram1D("d12-x01-y01", 32, 0., 25.);
-      _hist_chargelogpt = bookHistogram1D("d13-x01-y01", 32, 0., 6.);    
+      _hist_chargelogpt = bookHistogram1D("d13-x01-y01", 32, 0., 6.);
       _hist_chargermspt = bookHistogram1D("d14-x01-y01", 32, 0., 10.);
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
       const Jets jets = fastjets.jetsByPt(20.);
       const double weight = event.weight();
-      
+   
       if (jets.size() < 2 || jets.size() >= 3) vetoEvent;
       const double angle = fabs(jets[1].momentum().azimuthalAngle() - jets[0].momentum().azimuthalAngle());
       const double prapidity = fabs(jets[1].momentum().pseudorapidity() - jets[0].momentum().pseudorapidity());
@@ -58,14 +58,14 @@
       _hist_secondleadingjetpt->fill(jets[1].momentum().pT(), weight);
       _hist_jetdphi->fill(angle , weight);
       _hist_jetdeta->fill(prapidity, weight);
-      
+   
       foreach(Jet j, fastjets.jetsByPt(20*GeV)) {
         _hist_jetpt->fill(j.momentum().pT(), weight);
         _hist_jetptlog->fill(log(j.momentum().pT()), weight);
         _hist_jetphi->fill(j.momentum().azimuthalAngle(), weight);
         _hist_jeteta->fill(j.momentum().pseudorapidity(), weight);	
       }
-      
+   
       const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFS");
       // const FastJets& cfastjets = applyProjection<FastJets>(event, "ChargedJets");
       double meanpt(0), rmspt(0);
@@ -84,17 +84,17 @@
       _hist_chargermspt->fill(rmspt, weight);
       // }
     }
-    
-    
+ 
+ 
     void finalize() {
       /// @todo Normalise!
     }
-    
+ 
     //@}
 
 
   private:
-    
+ 
     AIDA::IHistogram1D* _hist_jetcount;
     AIDA::IHistogram1D* _hist_jetpt;
     AIDA::IHistogram1D* _hist_jetptlog;
@@ -109,12 +109,12 @@
     AIDA::IHistogram1D* _hist_chargept;
     AIDA::IHistogram1D* _hist_chargelogpt;
     AIDA::IHistogram1D* _hist_chargermspt;
-    
+ 
   };
-  
 
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<MC_LHC_DIJET> plugin_MC_LHC_DIJET;
-  
+
 }

Modified: trunk/src/Analyses/MC_LHC_DIPHOTON.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_DIPHOTON.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_DIPHOTON.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -23,7 +23,7 @@
     void init() {
       FinalState fs;
       addProjection(fs, "FS");
-      
+   
       IdentifiedFinalState ifs(-2.0, 2.0, 20.0*GeV);
       ifs.acceptId(PHOTON);
       addProjection(ifs, "IFS");
@@ -36,13 +36,13 @@
 
     void analyze(const Event& event) {
       const double weight = event.weight();
-      
+   
       ParticleVector photons = applyProjection<IdentifiedFinalState>(event, "IFS").particles();
-      
+   
       if (photons.size() < 2) {
         vetoEvent;
       }
-      
+   
       // Isolate photons with ET_sum in cone
       ParticleVector isolated_photons;
       ParticleVector fs = applyProjection<FinalState>(event, "FS").particles();
@@ -59,19 +59,19 @@
           isolated_photons.push_back(photon);
         }
       }
-      
+   
       if (isolated_photons.size() != 2) {
         vetoEvent;
       }
-      
+   
       FourMomentum mom_PP = isolated_photons[0].momentum() + isolated_photons[1].momentum();
       _h_m_PP->fill(mom_PP.mass(), weight);
       _h_pT_PP->fill(mom_PP.pT(), weight);
       _h_dphi_PP->fill(mapAngle0ToPi(isolated_photons[0].momentum().phi()-
                                      isolated_photons[1].momentum().phi())/M_PI, weight);
     }
-    
-    
+ 
+ 
     void finalize() {
       scale(_h_m_PP, crossSection()/sumOfWeights());
       scale(_h_pT_PP, crossSection()/sumOfWeights());
@@ -89,8 +89,8 @@
     AIDA::IHistogram1D* _h_pT_PP;
     AIDA::IHistogram1D* _h_dphi_PP;
     //@}
-    
-    
+ 
+ 
   };
 
 

Modified: trunk/src/Analyses/MC_LHC_LEADINGJETS.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_LEADINGJETS.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_LEADINGJETS.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,28 +11,28 @@
 
   /* Underlying event in leading jet, extended to the LHC
    * @author Andy Buckley
-   */ 
+   */
   class MC_LHC_LEADINGJETS : public Analysis {
   public:
-    
+ 
     /// Constructor
     MC_LHC_LEADINGJETS()
       : Analysis("MC_LHC_LEADINGJETS")
-    { 
+    {
       setBeams(PROTON, PROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
-    
+ 
     // Book histograms
     void init() {
       // Final state for the jet finding
       const FinalState fsj(-4.0, 4.0, 0.0*GeV);
       addProjection(fsj, "FSJ");
       addProjection(FastJets(fsj, FastJets::KT, 0.7), "Jets");
-      
+   
       // Charged final state for the distributions
       const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
       addProjection(cfs, "CFS");
@@ -116,8 +116,8 @@
           if (pT > ptMaxAway) ptMaxAway = pT;
         }
       }
-      
-      
+   
+   
       // Fill the histograms
       //_hist_tnchg->fill(jetpT/GeV, numToward/(4*PI/3), weight);
       _hist_pnchg->fill(jetpT/GeV, (numTrans1+numTrans2)/(4*PI/3), weight);
@@ -125,14 +125,14 @@
       _hist_pminnchg->fill(jetpT/GeV, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
       //_hist_pdifnchg->fill(jetpT/GeV, abs(numTrans1-numTrans2)/(2*PI/3), weight);
       //_hist_anchg->fill(jetpT/GeV, numAway/(4*PI/3), weight);
-      
+   
       //_hist_tcptsum->fill(jetpT/GeV, ptSumToward/GeV/(4*PI/3), weight);
       _hist_pcptsum->fill(jetpT/GeV, (ptSumTrans1+ptSumTrans2)/GeV/(4*PI/3), weight);
       _hist_pmaxcptsum->fill(jetpT/GeV, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/GeV/(2*PI/3), weight);
       _hist_pmincptsum->fill(jetpT/GeV, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/GeV/(2*PI/3), weight);
       //_hist_pdifcptsum->fill(jetpT/GeV, fabs(ptSumTrans1-ptSumTrans2)/GeV/(2*PI/3), weight);
       //_hist_acptsum->fill(jetpT/GeV, ptSumAway/GeV/(4*PI/3), weight);
-      
+   
       //if (numToward > 0) {
       //  _hist_tcptave->fill(jetpT/GeV, ptSumToward/GeV/numToward, weight);
       //  _hist_tcptmax->fill(jetpT/GeV, ptMaxToward/GeV, weight);
@@ -146,28 +146,28 @@
       //  _hist_acptmax->fill(jetpT/GeV, ptMaxAway/GeV, weight);
       //}
     }
-    
-    
-    void finalize() {  
+ 
+ 
+    void finalize() {
       //
     }
-    
-    
+ 
+ 
   private:
-    
+ 
     AIDA::IProfile1D *_hist_pnchg;
     AIDA::IProfile1D *_hist_pmaxnchg;
     AIDA::IProfile1D *_hist_pminnchg;
     AIDA::IProfile1D *_hist_pcptsum;
     AIDA::IProfile1D *_hist_pmaxcptsum;
     AIDA::IProfile1D *_hist_pmincptsum;
-    AIDA::IProfile1D *_hist_pcptave;  
-    
+    AIDA::IProfile1D *_hist_pcptave;
+ 
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<MC_LHC_LEADINGJETS> plugin_MC_LHC_LEADINGJETS;
-  
+
 }

Modified: trunk/src/Analyses/MC_LHC_PHOTONJETUE.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_PHOTONJETUE.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_PHOTONJETUE.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,28 +11,28 @@
 
   /* Underlying event in jet + isolated photon events
    * @author Andy Buckley
-   */ 
+   */
   class MC_LHC_PHOTONJETUE : public Analysis {
   public:
-    
+ 
     /// Constructor
     MC_LHC_PHOTONJETUE()
       : Analysis("MC_LHC_PHOTONJETUE")
-    { 
+    {
       setBeams(PROTON, PROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
-    
+ 
     // Book histograms and projections
     void init() {
       // Final state for the jet finding
       const FinalState fsj(-4.0, 4.0, 0.1*GeV);
       addProjection(fsj, "FSJ");
       addProjection(FastJets(fsj, FastJets::ANTIKT, 0.7), "Jets");
-      
+   
       // Charged final state for the distributions
       const ChargedFinalState cfs(-2.0, 2.0, 0.2*GeV);
       addProjection(cfs, "Tracks");
@@ -98,7 +98,7 @@
         getLog() << Log::DEBUG << "No hard photons found" << endl;
         vetoEvent;
       }
-      const FourMomentum pgamma = photonfs.particlesByPt().front().momentum();      
+      const FourMomentum pgamma = photonfs.particlesByPt().front().momentum();
 
       // Check that leading photon is isolated from jets
       bool isolated = true;
@@ -209,12 +209,12 @@
 
 
       }
-      
-      
+   
+   
       // Fill the histograms
       _hist_pnchg_jet->fill(jetpT/GeV, (numTrans1_jet+numTrans2_jet)/(4*PI/3), weight);
       _hist_pmaxnchg_jet->fill(jetpT/GeV, (numTrans1_jet>numTrans2_jet ? numTrans1_jet : numTrans2_jet)/(2*PI/3), weight);
-      _hist_pminnchg_jet->fill(jetpT/GeV, (numTrans1_jet<numTrans2_jet ? numTrans1_jet : numTrans2_jet)/(2*PI/3), weight);      
+      _hist_pminnchg_jet->fill(jetpT/GeV, (numTrans1_jet<numTrans2_jet ? numTrans1_jet : numTrans2_jet)/(2*PI/3), weight);
       _hist_pcptsum_jet->fill(jetpT/GeV, (ptSumTrans1_jet+ptSumTrans2_jet)/GeV/(4*PI/3), weight);
       _hist_pmaxcptsum_jet->fill(jetpT/GeV, (ptSumTrans1_jet>ptSumTrans2_jet ? ptSumTrans1_jet : ptSumTrans2_jet)/GeV/(2*PI/3), weight);
       _hist_pmincptsum_jet->fill(jetpT/GeV, (ptSumTrans1_jet<ptSumTrans2_jet ? ptSumTrans1_jet : ptSumTrans2_jet)/GeV/(2*PI/3), weight);
@@ -224,7 +224,7 @@
       //
       _hist_pnchg_gamma->fill(gammapT/GeV, (numTrans1_gamma+numTrans2_gamma)/(4*PI/3), weight);
       _hist_pmaxnchg_gamma->fill(gammapT/GeV, (numTrans1_gamma>numTrans2_gamma ? numTrans1_gamma : numTrans2_gamma)/(2*PI/3), weight);
-      _hist_pminnchg_gamma->fill(gammapT/GeV, (numTrans1_gamma<numTrans2_gamma ? numTrans1_gamma : numTrans2_gamma)/(2*PI/3), weight);      
+      _hist_pminnchg_gamma->fill(gammapT/GeV, (numTrans1_gamma<numTrans2_gamma ? numTrans1_gamma : numTrans2_gamma)/(2*PI/3), weight);
       _hist_pcptsum_gamma->fill(gammapT/GeV, (ptSumTrans1_gamma+ptSumTrans2_gamma)/GeV/(4*PI/3), weight);
       _hist_pmaxcptsum_gamma->fill(gammapT/GeV, (ptSumTrans1_gamma>ptSumTrans2_gamma ? ptSumTrans1_gamma : ptSumTrans2_gamma)/GeV/(2*PI/3), weight);
       _hist_pmincptsum_gamma->fill(gammapT/GeV, (ptSumTrans1_gamma<ptSumTrans2_gamma ? ptSumTrans1_gamma : ptSumTrans2_gamma)/GeV/(2*PI/3), weight);
@@ -233,18 +233,18 @@
       }
 
     }
-    
-    
-    void finalize() {  
+ 
+ 
+    void finalize() {
       //
     }
-    
-    
+ 
+ 
   private:
 
     AIDA::IHistogram1D* _hist_jetgamma_dR;
     AIDA::IHistogram1D* _hist_jetgamma_dphi;
-    
+ 
     AIDA::IProfile1D *_hist_pnchg_jet, *_hist_pnchg_gamma;
     AIDA::IProfile1D *_hist_pmaxnchg_jet, *_hist_pmaxnchg_gamma;
     AIDA::IProfile1D *_hist_pminnchg_jet, *_hist_pminnchg_gamma;
@@ -254,10 +254,10 @@
     AIDA::IProfile1D *_hist_pcptave_jet, *_hist_pcptave_gamma;
 
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<MC_LHC_PHOTONJETUE> plugin_MC_LHC_PHOTONJETUE;
-  
+
 }

Modified: trunk/src/Analyses/MC_LHC_SUSY.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_SUSY.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_SUSY.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,21 +15,21 @@
 
   /* Basic SUSY type validation analysis for the LHC
    * @author Andy Buckley
-   */ 
+   */
   class MC_LHC_SUSY : public Analysis {
   public:
-    
+ 
     /// Constructor
     MC_LHC_SUSY()
       : Analysis("MC_LHC_SUSY")
-    { 
+    {
       setBeams(PROTON, PROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
-    
+ 
     // Book histograms
     void init() {
       // Basic final state
@@ -236,28 +236,28 @@
       if (eminus_ok && eplus_ok) {
         const double m_ee = FourMomentum(peplus + peminus).mass();
         _hist_mll_ossf_ee->fill(m_ee/GeV, weight);
-        if (epluses.size() == 1 && eminuses.size() == 1) 
+        if (epluses.size() == 1 && eminuses.size() == 1)
           _hist_mll_2_ossf_ee->fill(m_ee/GeV, weight);
       }
       // m_mumu
       if (muminus_ok && muplus_ok) {
         const double m_mumu = FourMomentum(pmuplus + pmuminus).mass();
         _hist_mll_ossf_mumu->fill(m_mumu/GeV, weight);
-        if (mupluses.size() == 1 && muminuses.size() == 1) 
+        if (mupluses.size() == 1 && muminuses.size() == 1)
           _hist_mll_2_ossf_mumu->fill(m_mumu/GeV, weight);
       }
       // m_emu (both configurations)
       if (eminus_ok && muplus_ok) {
         const double m_emu = FourMomentum(pmuplus + peminus).mass();
         _hist_mll_osof_emu->fill(m_emu/GeV, weight);
-        if (mupluses.size() == 1 && eminuses.size() == 1) 
+        if (mupluses.size() == 1 && eminuses.size() == 1)
           _hist_mll_2_osof_emu->fill(m_emu/GeV, weight);
 
       }
       if (muminus_ok && eplus_ok) {
         const double m_mue = FourMomentum(peplus + pmuminus).mass();
         _hist_mll_osof_emu->fill(m_mue/GeV, weight);
-        if (epluses.size() == 1 && muminuses.size() == 1) 
+        if (epluses.size() == 1 && muminuses.size() == 1)
           _hist_mll_2_osof_emu->fill(m_mue/GeV, weight);
       }
 
@@ -292,17 +292,17 @@
       }
 
     }
-    
-    
-    void finalize() {  
+ 
+ 
+    void finalize() {
       /// @todo Normalisations
     }
 
-    //@}    
-    
+    //@}
+ 
 
   private:
-    
+ 
     AIDA::IHistogram1D *_hist_n_trk, *_hist_phi_trk, *_hist_eta_trk, *_hist_pt_trk;
     AIDA::IHistogram1D *_hist_n_jet, *_hist_phi_jet, *_hist_eta_jet, *_hist_pt_jet;
     AIDA::IHistogram1D *_hist_n_e, *_hist_phi_e, *_hist_eta_e, *_hist_pt_e;
@@ -311,13 +311,13 @@
     AIDA::IHistogram1D *_hist_n_gammaiso, *_hist_phi_gammaiso, *_hist_eta_gammaiso, *_hist_pt_gammaiso;
     AIDA::IHistogram1D *_hist_met;
     AIDA::IHistogram1D *_hist_mll_2_ossf_ee, *_hist_mll_2_ossf_mumu, *_hist_mll_2_osof_emu;
-    AIDA::IHistogram1D *_hist_mll_ossf_ee, *_hist_mll_ossf_mumu, *_hist_mll_osof_emu;    
+    AIDA::IHistogram1D *_hist_mll_ossf_ee, *_hist_mll_ossf_mumu, *_hist_mll_osof_emu;
     AIDA::IHistogram1D *_hist_mll_all_ossf_ee, *_hist_mll_all_ossf_mumu, *_hist_mll_all_osof_emu;
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<MC_LHC_SUSY> plugin_MC_LHC_SUSY;
-  
+
 }

Modified: trunk/src/Analyses/MC_LHC_TTBAR.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_TTBAR.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_TTBAR.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,11 +16,11 @@
 
     /// Constructor
     MC_LHC_TTBAR()
-      : Analysis("MC_LHC_TTBAR") 
+      : Analysis("MC_LHC_TTBAR")
     {
       /// @todo Set approriate for your analysis
       setBeams(PROTON, PROTON);
-      
+   
       /// @todo Set whether your finalize method needs the generator cross section
       setNeedsCrossSection(false);
 
@@ -50,7 +50,7 @@
     void analyze(const Event& event) {
       const double weight = event.weight();
       const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFS");
-      
+   
       foreach (const Particle& p, cfs.particles()) {
         double eta = p.momentum().pseudorapidity();
         _histPseudorapidity->fill(eta, weight);
@@ -62,7 +62,7 @@
     /// Normalise histograms etc., after the run
     void finalize() {
     scale(_histPseudorapidity, 1.0/sumOfWeights());
-      
+   
     }
 
 

Modified: trunk/src/Analyses/MC_LHC_WANALYSIS.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_WANALYSIS.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_WANALYSIS.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,19 +11,19 @@
 
   class MC_LHC_WANALYSIS : public Analysis {
   public:
-  
+
     /// Default constructor
-    MC_LHC_WANALYSIS() : Analysis("MC_LHC_WANALYSIS") 
+    MC_LHC_WANALYSIS() : Analysis("MC_LHC_WANALYSIS")
     {
       //
     }
-    
+ 
 
     /// @name Analysis methods
     /// @todo change "Weights" to differential cross sections once histos normalised to cross-section.
     //@{
 
-    void init() { 
+    void init() {
       const ChargedFinalState cfs;
       addProjection(cfs, "CFS");
       /// @todo Handle muon-decay Ws as well
@@ -49,15 +49,15 @@
       _hist_jetpt = bookHistogram1D("pt-jet", 50, 20, 100);
       _hist_jetlogpt = bookHistogram1D("logpt-jet", 20, 0, 20);
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const double weight = event.weight();
       const FinalState& cfs = applyProjection<FinalState>(event, "CFS");
       const WFinder& wf = applyProjection<WFinder>(event, "WF");
       const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
       const Jets jets = fastjets.jetsByPt();
-    
+ 
       // Charged particles part
       _hist_chargemulti->fill(cfs.particles().size(), weight);
       double meanpt(0), rmspt(0);
@@ -71,7 +71,7 @@
       _hist_chargemeanpt->fill(meanpt/GeV, weight);
       rmspt = sqrt(rmspt / cfs.particles().size());
       _hist_chargermspt->fill(rmspt/GeV, weight);
-      
+   
       // W part
       _hist_wcount->fill(wf.particles().size(), weight);
       foreach (const Particle& wp, wf.particles()) {
@@ -84,7 +84,7 @@
         _hist_wmass->fill(m/GeV, weight);
         _hist_wlogmass->fill(log(m/GeV), weight);	
       }
-      
+   
       // Jet part
       _hist_jetcount->fill(fastjets.size(), weight);
       foreach(const Jet& j, fastjets.jetsByPt()) {
@@ -93,14 +93,14 @@
         _hist_jetlogpt->fill(log(pT/GeV), weight);
       }
     }
-    
-    
+ 
+ 
     void finalize() {
       ///@todo Obtain cross-sections from generator and normalise histos to them.
     }
-    
+ 
     //@}
-    
+ 
   private:
 
     /// @name Histograms

Modified: trunk/src/Analyses/MC_LHC_ZANALYSIS.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_ZANALYSIS.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_LHC_ZANALYSIS.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,19 +11,19 @@
 
   class MC_LHC_ZANALYSIS : public Analysis {
   public:
-  
+
     /// Default constructor
-    MC_LHC_ZANALYSIS() : Analysis("MC_LHC_ZANALYSIS") 
+    MC_LHC_ZANALYSIS() : Analysis("MC_LHC_ZANALYSIS")
     {
       //
     }
-    
+ 
 
     /// @name Analysis methods
     /// @todo change "Weights" to differential cross sections once histos normalised to cross-section.
     //@{
 
-    void init() { 
+    void init() {
       const ChargedFinalState cfs;
       addProjection(cfs, "CFS");
       /// @todo Handle muon-decay Zs as well
@@ -49,16 +49,16 @@
       _hist_jetpt = bookHistogram1D("pt-jet", 50, 20, 100);
       _hist_jetlogpt = bookHistogram1D("logpt-jet", 20, 0, 20);
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const double weight = event.weight();
       const FinalState& cfs = applyProjection<FinalState>(event, "CFS");
       const ZFinder& zf = applyProjection<ZFinder>(event, "ZF");
       const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
       const Jets jets = fastjets.jetsByPt();
-    
-      // Charged particles part    
+ 
+      // Charged particles part
       _hist_chargemulti->fill(cfs.particles().size(), weight);
       double meanpt(0), rmspt(0);
       foreach (const Particle& p, cfs.particles()) {
@@ -71,7 +71,7 @@
       _hist_chargemeanpt->fill(meanpt/GeV, weight);
       rmspt = sqrt(rmspt / cfs.particles().size());
       _hist_chargermspt->fill(rmspt/GeV, weight);
-      
+   
       // Z part
       _hist_zcount->fill(zf.particles().size(), weight);
       foreach (const Particle& zp, zf.particles()) {
@@ -84,7 +84,7 @@
         _hist_zmass->fill(m/GeV, weight);
         _hist_zlogmass->fill(log(m/GeV), weight);	
       }
-      
+   
       // Jet part
       _hist_jetcount->fill(fastjets.size(), weight);
       foreach(const Jet& j, fastjets.jetsByPt()) {
@@ -93,14 +93,14 @@
         _hist_jetlogpt->fill(log(pT/GeV), weight);
       }
     }
-    
-    
+ 
+ 
     void finalize() {
       ///@todo Obtain cross-sections from generator and normalise histos to them.
     }
-    
+ 
     //@}
-    
+ 
 
   private:
 

Modified: trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc
==============================================================================
--- trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_TVT1960_PHOTONJETS.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,7 +9,7 @@
 
   class MC_TVT1960_PHOTONJETS : public MC_JetAnalysis {
   public:
-    
+ 
     /// Default constructor
     MC_TVT1960_PHOTONJETS()
       : MC_JetAnalysis("MC_TVT1960_PHOTONJETS", 1960.0, 4, "Jets")
@@ -17,22 +17,22 @@
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
-    //@{ 
-    
+    //@{
+ 
     /// Book histograms
     void init() {
       // General FS
       FinalState fs(-5.0, 5.0);
       addProjection(fs, "FS");
-      
+   
       // Get leading photon
       LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
       photonfs.addParticleId(PHOTON);
       addProjection(photonfs, "LeadingPhoton");
-      
+   
       // FS for jets excludes the leading photon
       VetoedFinalState vfs(fs);
       vfs.addVetoOnThisFinalState(photonfs);
@@ -45,28 +45,28 @@
       _h_photon_jet1_deta = bookHistogram1D("photon_jet1_deta", 50, -5.0, 5.0);
       _h_photon_jet1_dphi = bookHistogram1D("photon_jet1_dphi", 20, 0.0, M_PI);
       _h_photon_jet1_dR = bookHistogram1D("photon_jet1_dR", 25, 0.5, 7.0);
-      
+   
       MC_JetAnalysis::init();
     }
-    
+ 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event& e) {
       const double weight = e.weight();
-    
+ 
       // Get the photon
       const ParticleVector photons = applyProjection<FinalState>(e, "LeadingPhoton").particles();
       if (photons.size() != 1) {
         vetoEvent;
       }
       const FourMomentum photon = photons.front().momentum();
-      
+   
       // Get all charged particles
       const FinalState& fs = applyProjection<FinalState>(e, "JetFS");
       if (fs.empty()) {
         vetoEvent;
       }
-      
+   
       // Isolate photon by ensuring that a 0.4 cone around it contains less than 7% of the photon's energy
       const double egamma = photon.E();
       double econe = 0.0;
@@ -79,10 +79,10 @@
           }
         }
       }
-      
+   
       _h_photon_pT->fill(photon.pT(),weight);
       _h_photon_y->fill(photon.rapidity(),weight);
-      
+   
       const FastJets& jetpro = applyProjection<FastJets>(e, "Jets");
       const Jets& jets = jetpro.jetsByPt(20.0*GeV);
       if (jets.size()>0) {
@@ -90,11 +90,11 @@
         _h_photon_jet1_dphi->fill(mapAngle0ToPi(photon.phi()-jets[0].momentum().phi()), weight);
         _h_photon_jet1_dR->fill(deltaR(photon, jets[0].momentum()), weight);
       }
-      
+   
       MC_JetAnalysis::analyze(e);
     }
-    
-    
+ 
+ 
     // Finalize
     void finalize() {
       scale(_h_photon_pT, crossSection()/sumOfWeights());
@@ -102,7 +102,7 @@
       scale(_h_photon_jet1_deta, crossSection()/sumOfWeights());
       scale(_h_photon_jet1_dphi, crossSection()/sumOfWeights());
       scale(_h_photon_jet1_dR, crossSection()/sumOfWeights());
-      
+   
       MC_JetAnalysis::finalize();
     }
 
@@ -121,8 +121,8 @@
     //@}
 
   };
-    
-    
+ 
+ 
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<MC_TVT1960_PHOTONJETS> plugin_MC_TVT1960_PHOTONJETS;

Modified: trunk/src/Analyses/MC_TVT1960_ZJETS.cc
==============================================================================
--- trunk/src/Analyses/MC_TVT1960_ZJETS.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/MC_TVT1960_ZJETS.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -20,10 +20,10 @@
       setBeams(PROTON, ANTIPROTON);
       setNeedsCrossSection(true);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     /// Book histograms
     void init() {
@@ -39,21 +39,21 @@
       _h_Z_jet1_dR = bookHistogram1D("Z_jet1_dR", 25, 0.5, 7.0);
       _h_lepton_pT = bookHistogram1D("lepton_pT", 100, 0.0, 500.0);
       _h_lepton_eta = bookHistogram1D("lepton_eta", 40, -4.0, 4.0);
-      
+   
       MC_JetAnalysis::init();
     }
-    
+ 
 
-    
-    /// Do the analysis 
+ 
+    /// Do the analysis
     void analyze(const Event & e) {
       const double weight = e.weight();
-      
+   
       const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
       if (zfinder.particles().size()!=1) {
         vetoEvent;
       }
-      
+   
       FourMomentum zmom(zfinder.particles()[0].momentum());
       _h_Z_mass->fill(zmom.mass(),weight);
       _h_Z_pT->fill(zmom.pT(),weight);
@@ -62,17 +62,17 @@
         _h_lepton_pT->fill(l.momentum().pT(), weight);
         _h_lepton_eta->fill(l.momentum().eta(), weight);
       }
-      
+   
       const FastJets& jetpro = applyProjection<FastJets>(e, "Jets");
       const Jets& jets = jetpro.jetsByPt(20.0*GeV);
       if (jets.size() > 0) {
         _h_Z_jet1_deta->fill(zmom.eta()-jets[0].momentum().eta(), weight);
         _h_Z_jet1_dR->fill(deltaR(zmom, jets[0].momentum()), weight);
       }
-      
+   
       MC_JetAnalysis::analyze(e);
     }
-    
+ 
 
     /// Finalize
     void finalize() {
@@ -83,10 +83,10 @@
       scale(_h_Z_jet1_dR, crossSection()/sumOfWeights());
       scale(_h_lepton_pT, crossSection()/sumOfWeights());
       scale(_h_lepton_eta, crossSection()/sumOfWeights());
-      
+   
       MC_JetAnalysis::finalize();
     }
-    
+ 
     //@}
 
 
@@ -105,9 +105,9 @@
 
   };
 
-  
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<MC_TVT1960_ZJETS> plugin_MC_TVT1960_ZJETS;
-  
+
 }

Modified: trunk/src/Analyses/Makefile.am
==============================================================================
--- trunk/src/Analyses/Makefile.am	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/Makefile.am	Thu Nov 19 15:02:51 2009	(r2080)
@@ -79,7 +79,7 @@
     DELPHI_1996_S3430090.cc \
     DELPHI_2002_069_CONF_603.cc \
     OPAL_1998_S3780481.cc \
-    OPAL_2004_S6132243.cc 
+    OPAL_2004_S6132243.cc
 
 
 lib_LTLIBRARIES += RivetRHICAnalyses.la

Modified: trunk/src/Analyses/OPAL_1998_S3780481.cc
==============================================================================
--- trunk/src/Analyses/OPAL_1998_S3780481.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/OPAL_1998_S3780481.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,11 +14,11 @@
   /// @author Hendrik Hoeth
   class OPAL_1998_S3780481 : public Analysis {
   public:
-    
+ 
     /// Constructor
     OPAL_1998_S3780481() : Analysis("OPAL_1998_S3780481")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
 
       // Counters
       _weightedTotalPartNum = 0;
@@ -26,8 +26,8 @@
       _SumOfcWeights = 0;
       _SumOfbWeights = 0;
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -35,27 +35,27 @@
       // First, veto on leptonic events by requiring at least 4 charged FS particles
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         getLog() << Log::DEBUG << "Failed ncharged cut" << endl;
         vetoEvent;
       }
       getLog() << Log::DEBUG << "Passed ncharged cut" << endl;
-      
+   
       // Get event weight for histo filling
       const double weight = e.weight();
       _weightedTotalPartNum += numParticles * weight;
-      
+   
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom << endl;
-      
+   
       int flavour = 0;
       const InitialQuarks& iqf = applyProjection<InitialQuarks>(e, "IQF");
-      
+   
       // If we only have two quarks (qqbar), just take the flavour.
       // If we have more than two quarks, look for the highest energetic q-qbar pair.
       if (iqf.particles().size() == 2) {
@@ -75,7 +75,7 @@
           }
         }
       }
-      
+   
       switch (flavour) {
       case 1:
       case 2:
@@ -89,7 +89,7 @@
         _SumOfbWeights += weight;
         break;
       }
-      
+   
       foreach (const Particle& p, fs.particles()) {
         const double xp = p.momentum().vector3().mod()/meanBeamMom;
         const double logxp = -std::log(xp);
@@ -117,10 +117,10 @@
           break;
         }
       }
-      
+   
     }
-    
-    
+ 
+ 
     void init() {
       // Projections
       addProjection(Beam(), "Beams");
@@ -141,8 +141,8 @@
       _histMultiChargedb   = bookHistogram1D(9, 1, 3);
       _histMultiChargedall = bookHistogram1D(9, 1, 4);
     }
-    
-    
+ 
+ 
     /// Finalize
     void finalize() {
       const double avgNumParts = _weightedTotalPartNum / sumOfWeights();
@@ -154,20 +154,20 @@
       normalize(_histLogXpc   , avgNumParts);
       normalize(_histLogXpb   , avgNumParts);
       normalize(_histLogXpall , avgNumParts);
-      
+   
       scale(_histMultiChargeduds, 1.0/_SumOfudsWeights);
       scale(_histMultiChargedc  , 1.0/_SumOfcWeights);
       scale(_histMultiChargedb  , 1.0/_SumOfbWeights);
       scale(_histMultiChargedall, 1.0/sumOfWeights());
     }
-    
+ 
     //@}
 
 
   private:
 
     /// Store the weighted sums of numbers of charged / charged+neutral
-    /// particles - used to calculate average number of particles for the 
+    /// particles - used to calculate average number of particles for the
     /// inclusive single particle distributions' normalisations.
     double _weightedTotalPartNum;
 
@@ -192,9 +192,9 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<OPAL_1998_S3780481> plugin_OPAL_1998_S3780481;
-  
+
 }

Modified: trunk/src/Analyses/OPAL_2004_S6132243.cc
==============================================================================
--- trunk/src/Analyses/OPAL_2004_S6132243.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/OPAL_2004_S6132243.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,11 +13,11 @@
 namespace Rivet {
 
 
-  class OPAL_2004_S6132243 : public Analysis { 
+  class OPAL_2004_S6132243 : public Analysis {
   public:
 
     /// Constructor
-    OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243") { 
+    OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243") {
       //
     }
 
@@ -25,7 +25,7 @@
     /// @name Analysis methods
     //@{
 
-    void init() { 
+    void init() {
       // Projections
       addProjection(Beam(), "Beams");
       const ChargedFinalState cfs;
@@ -74,7 +74,7 @@
     }
 
 
-    void analyze(const Event& event) { 
+    void analyze(const Event& event) {
       const FinalState& cfs = applyProjection<FinalState>(event, "FS");
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (cfs.size() < 2) vetoEvent;
@@ -103,8 +103,8 @@
       // Thrusts
       const Thrust& thrust = applyProjection<Thrust>(event, "Thrust");
       _hist1MinusT[ih]->fill(1-thrust.thrust(), weight);
-      _histTMajor[ih]->fill(thrust.thrustMajor(), weight); 
-      _histTMinor[ih]->fill(thrust.thrustMinor(), weight); 
+      _histTMajor[ih]->fill(thrust.thrustMajor(), weight);
+      _histTMinor[ih]->fill(thrust.thrustMinor(), weight);
       _histOblateness[ih]->fill(thrust.oblateness(), weight);
       for (int n = 1; n <= 5; ++n) {
         _hist1MinusTMom[ih]->fill(n, pow(1-thrust.thrust(), n)*weight);
@@ -143,7 +143,7 @@
       for (int n = 1; n <= 5; ++n) {
         _histCParamMom[ih]->fill(n, pow(cparam, n)*weight);
       }
-      
+   
       // Hemispheres
       const Hemispheres& hemi = applyProjection<Hemispheres>(event, "Hemispheres");
       const double hemi_mh = hemi.scaledM2high();
@@ -166,36 +166,36 @@
     }
 
 
-    void finalize() { 
-      /// @todo Normalisations / scalings, etc.    
+    void finalize() {
+      /// @todo Normalisations / scalings, etc.
       for (int isqrts = 0; isqrts < 4; ++isqrts) {
-        normalize(_hist1MinusT[isqrts]); 
-        normalize(_histTMajor[isqrts]); 
-        normalize(_histTMinor[isqrts]); 
-        normalize(_histOblateness[isqrts]); 
-        normalize(_histSphericity[isqrts]); 
-        normalize(_histAplanarity[isqrts]); 
-        normalize(_histHemiMassH[isqrts]); 
-        normalize(_histHemiMassL[isqrts]); 
-        normalize(_histHemiBroadW[isqrts]); 
-        normalize(_histHemiBroadN[isqrts]); 
-        normalize(_histHemiBroadT[isqrts]); 
+        normalize(_hist1MinusT[isqrts]);
+        normalize(_histTMajor[isqrts]);
+        normalize(_histTMinor[isqrts]);
+        normalize(_histOblateness[isqrts]);
+        normalize(_histSphericity[isqrts]);
+        normalize(_histAplanarity[isqrts]);
+        normalize(_histHemiMassH[isqrts]);
+        normalize(_histHemiMassL[isqrts]);
+        normalize(_histHemiBroadW[isqrts]);
+        normalize(_histHemiBroadN[isqrts]);
+        normalize(_histHemiBroadT[isqrts]);
         normalize(_histCParam[isqrts]);
         normalize(_histDParam[isqrts]);
         normalize(_histY23Durham[isqrts]);
         //
         scale(_hist1MinusTMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histTMajorMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histTMinorMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histOblatenessMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histSphericityMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histHemiMassHMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histHemiMassLMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histHemiBroadWMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histHemiBroadNMom[isqrts], 1.0/_sumPassedWeights); 
-        scale(_histHemiBroadTMom[isqrts], 1.0/_sumPassedWeights); 
+        scale(_histTMajorMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histTMinorMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histOblatenessMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histSphericityMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histHemiMassHMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histHemiMassLMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histHemiBroadWMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histHemiBroadNMom[isqrts], 1.0/_sumPassedWeights);
+        scale(_histHemiBroadTMom[isqrts], 1.0/_sumPassedWeights);
         scale(_histCParamMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histY23DurhamMom[isqrts], 1.0/_sumPassedWeights); 
+        scale(_histY23DurhamMom[isqrts], 1.0/_sumPassedWeights);
       }
     }
 

Modified: trunk/src/Analyses/PDG_Hadron_Multiplicities.cc
==============================================================================
--- trunk/src/Analyses/PDG_Hadron_Multiplicities.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/PDG_Hadron_Multiplicities.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,11 +14,11 @@
   /// @author Hendrik Hoeth
   class PDG_HADRON_MULTIPLICITIES : public Analysis {
   public:
-    
+ 
     /// Constructor
     PDG_HADRON_MULTIPLICITIES() : Analysis("PDG_HADRON_MULTIPLICITIES")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
     }
 
 
@@ -29,7 +29,7 @@
       // First, veto on leptonic events by requiring at least 4 charged FS particles
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
@@ -42,7 +42,7 @@
 
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom/GeV << " GeV" << endl;
 

Modified: trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc
==============================================================================
--- trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/PDG_Hadron_Multiplicities_Ratios.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,19 +14,19 @@
   /// @author Holger Schulz
   class PDG_HADRON_MULTIPLICITIES_RATIOS : public Analysis {
   public:
-    
+ 
     /// Constructor
     PDG_HADRON_MULTIPLICITIES_RATIOS() : Analysis("PDG_HADRON_MULTIPLICITIES_RATIOS")
     {
-      setBeams(ELECTRON, POSITRON); 
+      setBeams(ELECTRON, POSITRON);
 
       _weightedTotalNumPiPlus10 = 0;
       _weightedTotalNumPiPlus32 = 0;
       _weightedTotalNumPiPlus91 = 0;
       _weightedTotalNumPiPlus165 = 0;
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
     //@{
 
@@ -34,20 +34,20 @@
       // First, veto on leptonic events by requiring at least 4 charged FS particles
       const FinalState& fs = applyProjection<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();
-      
+   
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         getLog() << Log::DEBUG << "Failed leptonic event cut" << endl;
         vetoEvent;
       }
       getLog() << Log::DEBUG << "Passed leptonic event cut" << endl;
-      
+   
       // Get event weight for histo filling
       const double weight = e.weight();
 
       // Get beams and average beam momentum
       const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
-      const double meanBeamMom = ( beams.first.momentum().vector3().mod() + 
+      const double meanBeamMom = ( beams.first.momentum().vector3().mod() +
                                    beams.second.momentum().vector3().mod() ) / 2.0;
       getLog() << Log::DEBUG << "Avg beam momentum = " << meanBeamMom/GeV << " GeV" << endl;
 

Modified: trunk/src/Analyses/SFM_1984_S1178091.cc
==============================================================================
--- trunk/src/Analyses/SFM_1984_S1178091.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/SFM_1984_S1178091.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -25,48 +25,48 @@
       addProjection(ChargedFinalState(), "FS");
 
       // Histograms
-      _hist_multiplicity_inel_30 = bookHistogram1D(1, 1, 1); 
+      _hist_multiplicity_inel_30 = bookHistogram1D(1, 1, 1);
       _hist_multiplicity_inel_45 = bookHistogram1D(1, 1, 2);
       _hist_multiplicity_inel_53 = bookHistogram1D(1, 1, 3);
       _hist_multiplicity_inel_63 = bookHistogram1D(1, 1, 4);
       _hist_multiplicity_nsd_30 = bookHistogram1D(2, 1, 1);
       _hist_multiplicity_nsd_45 = bookHistogram1D(2, 1, 2);
       _hist_multiplicity_nsd_53 = bookHistogram1D(2, 1, 3);
-      _hist_multiplicity_nsd_63 = bookHistogram1D(2, 1, 4);  
+      _hist_multiplicity_nsd_63 = bookHistogram1D(2, 1, 4);
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       const double weight = event.weight();
       const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
       const ChargedFinalState& fs = applyProjection<ChargedFinalState>(event, "FS");
       const size_t numParticles = fs.particles().size();
-      
-      // Decide whether event is of diffractive type or not 
+   
+      // Decide whether event is of diffractive type or not
       // FIXME: it is not so clear in the paper how this distinction is made.
       // They seem to require either exactly one particle with Feynman x larger
       // than 0.8 to call an event diffractive or that there are no tracks
       // reconstructed in either of the two hemispheres. For the latter
       // they require in addition also the number of cahrged particles
       // to be smaller than 8.
-      
+   
       int n_left(0), n_right(0), n_large_x(0);
       foreach (const Particle& p, fs.particles()) {
-        // Calculate the particles' Feynman x  
+        // Calculate the particles' Feynman x
         const double x_feyn = 2.0 * (p.momentum().pz()/GeV) / sqrtS;
         if (fabs(x_feyn) > 0.8 ) n_large_x += 1;
-        
+     
         // Pseudorapidity
         const double eta = p.momentum().pseudorapidity();
         if (eta > 0.0) n_right += 1;
         else if (eta < 0.0) n_left += 1;
       }
-      
+   
       // Not sure about the "=="
       /// @todo Numerical precision problem!
       bool isDiffractive = false;
       if (n_large_x == 1) isDiffractive = true;
-      
+   
       // FIXME: Not sure about the "== 1", the paper says no charged particle
       // that was reconstructed so the incoming protons must run down the beam
       // pipe. Since we look a the complete final state here no particle being
@@ -75,11 +75,11 @@
       if ((n_left == 1 || n_right == 1) && numParticles < 8 ) {
         isDiffractive = true;
       }
-      
-      getLog() << Log::DEBUG << "N_left: " << n_left << ", N_right: " 
+   
+      getLog() << Log::DEBUG << "N_left: " << n_left << ", N_right: "
                << n_right << ", N_large_x: " << n_large_x << endl;
-      
-      
+   
+   
       // Fill histos of charged multiplicity distributions
       // The inelastic samples are said to contain also diffractive events.
       //
@@ -89,15 +89,15 @@
           _hist_multiplicity_inel_30->fill(numParticles, weight);
         } else {
           _hist_multiplicity_inel_30->fill(numParticles, weight);
-        }  
-      } 
+        }
+      }
       else if (fuzzyEquals(sqrtS, 44/GeV, 1E-1)) {
         if (isDiffractive) {
           _hist_multiplicity_nsd_45 ->fill(numParticles, weight);
           _hist_multiplicity_inel_45->fill(numParticles, weight);
         } else {
           _hist_multiplicity_inel_45->fill(numParticles, weight);
-        }  
+        }
       }
       else if (fuzzyEquals(sqrtS, 53/GeV, 1E-1)) {
         if (isDiffractive) {
@@ -105,7 +105,7 @@
           _hist_multiplicity_inel_53->fill(numParticles, weight);
         } else {
           _hist_multiplicity_inel_53->fill(numParticles, weight);
-        }  
+        }
       }
       else if (fuzzyEquals(sqrtS, 63/GeV, 1E-1)) {
         if (isDiffractive) {
@@ -114,12 +114,12 @@
         }
         else {
           _hist_multiplicity_inel_63->fill(numParticles, weight);
-        }  
+        }
       }
-      
+   
     }
-    
-    
+ 
+ 
     void finalize() {
       normalize(_hist_multiplicity_inel_30);
       normalize(_hist_multiplicity_inel_45);
@@ -131,10 +131,10 @@
       normalize(_hist_multiplicity_nsd_63 );
     }
     //@}
-    
+ 
 
   private:
-    
+ 
     /// @name Histograms
     //@{
 

Modified: trunk/src/Analyses/STAR_2008_S7993412.cc
==============================================================================
--- trunk/src/Analyses/STAR_2008_S7993412.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/STAR_2008_S7993412.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,10 +14,10 @@
     {
       setBeams(PROTON, PROTON);
     }
-    
-    
+ 
+ 
     /// @name Analysis methods
-    //@{ 
+    //@{
 
     /// Book projections and histograms
     void init() {
@@ -29,7 +29,7 @@
     }
 
 
-    /// Do the analysis 
+    /// Do the analysis
     void analyze(const Event& event) {
       // Skip if the event is empty
       const FinalState& fs = applyProjection<FinalState>(event, "FS");
@@ -38,7 +38,7 @@
                  << " because no final state found " << endl;
         vetoEvent;
       }
-      
+   
       const double weight = event.weight();
 
       foreach (const Particle& tp, fs.particles()) {
@@ -57,8 +57,8 @@
         }
       }
     }
-    
-    
+ 
+ 
     /// Finalize
     void finalize() {
       /// @todo Use the generator cross-section
@@ -66,7 +66,7 @@
       //normalize(_h_jet_pT_MB, 16603100);
       //normalize(_h_jet_pT_HT, 1808234);
     }
-    
+ 
     //@}
 
 
@@ -80,9 +80,9 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<STAR_2008_S7993412> plugin_STAR_2008_S7993412;
-  
+
 }

Modified: trunk/src/Analyses/UA1_1990_S2044935.cc
==============================================================================
--- trunk/src/Analyses/UA1_1990_S2044935.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/UA1_1990_S2044935.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -23,13 +23,13 @@
       _sumwTrig40 = 0;
       _sumwTrig80 = 0;
     }
-    
+ 
 
     /// @name Analysis methods
     //@{
 
     /// Book projections and histograms
-    void init() { 
+    void init() {
       addProjection(ChargedFinalState(-5.5, 5.5), "TriggerFS");
       addProjection(ChargedFinalState(-2.5, 2.5), "TrackFS");
       addProjection(Beam(), "Beam");
@@ -51,11 +51,11 @@
       _hist_Pt63 = bookProfile1D(8,1,1);
       _hist_Pt200 = bookProfile1D(6,1,1);
       _hist_Pt900 = bookProfile1D(7,1,1);
-      _hist_Etavg200 = bookProfile1D(12,1,1); 
-      _hist_Etavg500 = bookProfile1D(12,1,2); 
+      _hist_Etavg200 = bookProfile1D(12,1,1);
+      _hist_Etavg500 = bookProfile1D(12,1,2);
       _hist_Etavg900 = bookProfile1D(12,1,3);
     }
-    
+ 
 
     void analyze(const Event& event) {
       // Trigger
@@ -67,7 +67,7 @@
         else if (inRange(eta, 1.5, 5.5)) n_plus++;
       }
       getLog() << Log::DEBUG << "Trigger -: " << n_minus << ", Trigger +: " << n_plus << endl;
-      if (n_plus == 0 || n_minus == 0) vetoEvent;      
+      if (n_plus == 0 || n_minus == 0) vetoEvent;
       const double weight = event.weight();
       _sumwTrig += weight;
 
@@ -121,12 +121,12 @@
             _sumwTrig80 += weight;
             _hist_Esigd3p80->fill(pt/GeV, scaled_weight);
           }
-        } 
+        }
       }
-      
+   
     }
-    
-    
+ 
+ 
     void finalize() {
       const double xsec = crossSection();
       if (_sumwTrig > 0) {
@@ -150,15 +150,15 @@
         normalize(_hist_Et900, xsec/millibarn * _sumwTrig/sumOfWeights());
       }
     }
-    
+ 
     //@}
 
-    
+ 
   private:
 
     /// Weight counters
     double _sumwTrig, _sumwTrig08, _sumwTrig40, _sumwTrig80;
-    
+ 
     /// @name Histogram collections
     //@{
     AIDA::IHistogram1D* _hist_Nch200;
@@ -180,12 +180,12 @@
     AIDA::IHistogram1D* _hist_Et500;
     AIDA::IHistogram1D* _hist_Et900;
     //@}
-    
+ 
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<UA1_1990_S2044935> plugin_UA1_1990_S2044935;
-  
+
 }

Modified: trunk/src/Analyses/UA5_1982_S875503.cc
==============================================================================
--- trunk/src/Analyses/UA5_1982_S875503.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/UA5_1982_S875503.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,17 +10,17 @@
 
   class UA5_1982_S875503 : public Analysis {
   public:
-    
+ 
     /// Default constructor
     UA5_1982_S875503() : Analysis("UA5_1982_S875503") {
       //
     }
-  
+
 
     /// @name Analysis methods
     //@{
 
-    void init() { 
+    void init() {
       addProjection(TriggerUA5(), "Trigger");
       addProjection(ChargedFinalState(-3.5, 3.5), "CFS");
 
@@ -29,21 +29,21 @@
       _hist_eta_pp    = bookHistogram1D(3,1,1);
       _hist_eta_ppbar = bookHistogram1D(4,1,1);
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       // Trigger
       const TriggerUA5& trigger = applyProjection<TriggerUA5>(event, "Trigger");
       if (!trigger.nsdDecision()) vetoEvent;
 
       // Get tracks
-      const double weight = event.weight(); 
+      const double weight = event.weight();
       const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(event, "CFS");
-      
+   
       // Fill mean charged multiplicity histos
       if (trigger.samebeams()) { // PP
         _hist_nch_pp->fill(_hist_nch_pp->binMean(0), cfs.size());
-      } else { // PPbar 
+      } else { // PPbar
         _hist_nch_ppbar->fill(_hist_nch_ppbar->binMean(0), cfs.size());
       }
 
@@ -55,10 +55,10 @@
           _hist_eta_ppbar->fill(fabs(p.momentum().eta()), weight);
         }
       }
-            
+         
     }
-    
-    
+ 
+ 
     void finalize() {
       scale(_hist_nch_pp,    1.0/sumOfWeights());
       scale(_hist_nch_ppbar, 1.0/sumOfWeights());
@@ -67,10 +67,10 @@
     }
 
     //@}
-    
-  
+ 
+
   private:
-    
+ 
     /// @name Histogram collections
     //@{
     AIDA::IHistogram1D* _hist_nch_pp;
@@ -78,12 +78,12 @@
     AIDA::IHistogram1D* _hist_eta_pp;
     AIDA::IHistogram1D* _hist_eta_ppbar;
     //@}
-    
+ 
   };
-  
-  
-  
+
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<UA5_1982_S875503> plugin_UA5_1982_S875503;
-  
+
 }

Modified: trunk/src/Analyses/UA5_1986_S1583476.cc
==============================================================================
--- trunk/src/Analyses/UA5_1986_S1583476.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/UA5_1986_S1583476.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,15 +15,15 @@
     UA5_1986_S1583476() : Analysis("UA5_1986_S1583476") {
       setBeams(PROTON, ANTIPROTON);
     }
-    
+ 
 
 
     /// @name Analysis methods
     //@{
-    
+ 
     void init() {
       addProjection(TriggerUA5(), "Trigger");
-      addProjection(Beam(), "Beams"); 
+      addProjection(Beam(), "Beams");
       addProjection(ChargedFinalState(-5.0, 5.0), "CFS50");
 
       // Histograms
@@ -31,14 +31,14 @@
       _hist_eta_inelastic_200 = bookHistogram1D(1,1,2);
       _hist_eta_nsd_900       = bookHistogram1D(1,1,3);
       _hist_eta_inelastic_900 = bookHistogram1D(1,1,4);
-      
+   
       _hist_eta_nsd_n_2_200  = bookHistogram1D(2,1,1);
       _hist_eta_nsd_n_12_200 = bookHistogram1D(2,1,2);
       _hist_eta_nsd_n_22_200 = bookHistogram1D(2,1,3);
       _hist_eta_nsd_n_32_200 = bookHistogram1D(2,1,4);
       _hist_eta_nsd_n_42_200 = bookHistogram1D(2,1,5);
       _hist_eta_nsd_n_52_200 = bookHistogram1D(2,1,6);
-      
+   
       _hist_eta_nsd_n_2_900  = bookHistogram1D(3,1,1);
       _hist_eta_nsd_n_12_900 = bookHistogram1D(3,1,2);
       _hist_eta_nsd_n_22_900 = bookHistogram1D(3,1,3);
@@ -49,8 +49,8 @@
       _hist_eta_nsd_n_72_900 = bookHistogram1D(3,1,8);
       _hist_eta_nsd_n_82_900 = bookHistogram1D(3,1,9);
     }
-    
-    
+ 
+ 
     void analyze(const Event& event) {
       // Trigger
       const TriggerUA5& trigger = applyProjection<TriggerUA5>(event, "Trigger");
@@ -60,12 +60,12 @@
       const double weight = event.weight();
       const double sqrtS = applyProjection<Beam>(event, "Beams").sqrtS();
 
-      // Iterate over particles in |eta| < 5.0 and fill histos with |eta| 
+      // Iterate over particles in |eta| < 5.0 and fill histos with |eta|
       const ChargedFinalState& cfs50 = applyProjection<ChargedFinalState>(event, "CFS50");
       const unsigned int numP = cfs50.size();
       foreach (const Particle& p, cfs50.particles()) {
         double eta = fabs(p.momentum().pseudorapidity());
-        
+     
         // Fill 200 GeV histos
         if (fuzzyEquals(sqrtS/GeV, 200.0, 1E-4)) {
           // Fill histos that don't require a certain multiplicity
@@ -81,7 +81,7 @@
             else if ( numP >= 52 ) _hist_eta_nsd_n_52_200->fill(eta, weight);
           }
         }
-        
+     
         // Fill 900 GeV histos
         else if (fuzzyEquals(sqrtS/GeV, 900.0, 1E-4)) {
           // Fill histos that don't require a certain multiplicity
@@ -99,25 +99,25 @@
             else if ( ( 72 <= numP ) && ( numP <= 80 ) ) _hist_eta_nsd_n_72_900->fill(eta, weight);
             else if ( numP >= 82 ) _hist_eta_nsd_n_82_900->fill(eta, weight);
           }
-        } 
-      }        
+        }
+      }
   }
-  
-  
+
+
     void finalize() {
       // Scale histos to the area of the corresponding reference histos
-      normalize(_hist_eta_nsd_200, 10.2225);    
+      normalize(_hist_eta_nsd_200, 10.2225);
       normalize(_hist_eta_inelastic_200, 9.255);
       normalize(_hist_eta_nsd_900, 15.285);
       normalize(_hist_eta_inelastic_900, 13.9725);
-      
+   
       normalize(_hist_eta_nsd_n_2_200, 3.285);
       normalize(_hist_eta_nsd_n_12_200, 7.34);
       normalize(_hist_eta_nsd_n_22_200, 12.02);
       normalize(_hist_eta_nsd_n_32_200, 17.2);
       normalize(_hist_eta_nsd_n_42_200, 21.99);
       normalize(_hist_eta_nsd_n_52_200, 27.8);
-      
+   
       normalize(_hist_eta_nsd_n_2_900, 2.7);
       normalize(_hist_eta_nsd_n_12_900, 6.425);
       normalize(_hist_eta_nsd_n_22_900, 10.54);
@@ -126,9 +126,9 @@
       normalize(_hist_eta_nsd_n_52_900, 25.13);
       normalize(_hist_eta_nsd_n_62_900, 29.235);
       normalize(_hist_eta_nsd_n_72_900, 33.81);
-      normalize(_hist_eta_nsd_n_82_900, 41.75);      
+      normalize(_hist_eta_nsd_n_82_900, 41.75);
     }
-    
+ 
 
   private:
 
@@ -161,10 +161,10 @@
     //@}
 
   };
- 
 
-  
+
+
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<UA5_1986_S1583476> plugin_UA5_1986_S1583476;
-  
+
 }

Modified: trunk/src/Analyses/UA5_1988_S1867512.cc
==============================================================================
--- trunk/src/Analyses/UA5_1988_S1867512.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/UA5_1988_S1867512.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -213,7 +213,7 @@
     
   private:
    
-    // 
+    // CoM energy
     double sqrtS;
 
     /// @name Vectors for storing the number of particles in the different eta intervals per event.

Modified: trunk/src/Analyses/UA5_1989_S1926373.cc
==============================================================================
--- trunk/src/Analyses/UA5_1989_S1926373.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/UA5_1989_S1926373.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,9 +11,9 @@
 
   class UA5_1989_S1926373 : public Analysis {
   public:
-    
+ 
     /// Constructor
-    UA5_1989_S1926373() : Analysis("UA5_1989_S1926373") { 
+    UA5_1989_S1926373() : Analysis("UA5_1989_S1926373") {
       setBeams(PROTON, ANTIPROTON);
       _numVetoed = 0;
     }
@@ -32,7 +32,7 @@
       addProjection(ChargedFinalState(-5.0, 5.0), "CFS50");
 
       // NB. _hist_nch{200,900} and _hist_nch{200,900}eta50 use the same data but different binning
-      _hist_nch200       = bookHistogram1D(1, 1, 1); 
+      _hist_nch200       = bookHistogram1D(1, 1, 1);
       _hist_nch900       = bookHistogram1D(2, 1, 1);
       _hist_nch200eta05  = bookHistogram1D(3, 1, 1);
       _hist_nch200eta15  = bookHistogram1D(4, 1, 1);
@@ -42,13 +42,13 @@
       _hist_nch900eta15  = bookHistogram1D(8, 1, 1);
       _hist_nch900eta30  = bookHistogram1D(9, 1, 1);
       _hist_nch900eta50  = bookHistogram1D(10, 1, 1);
-      _hist_mean_nch_200 = bookHistogram1D(11, 1, 1); 
+      _hist_mean_nch_200 = bookHistogram1D(11, 1, 1);
       _hist_mean_nch_900 = bookHistogram1D(12, 1, 1);
 
       /// @todo Moments of distributions
-    } 
-    
-    
+    }
+ 
+ 
     /// Do the analysis
     void analyze(const Event& event) {
       // Trigger
@@ -57,13 +57,13 @@
 
       const double sqrtS = applyProjection<Beam>(event, "Beams").sqrtS();
       const double weight = event.weight();
-      
+   
       // Count final state particles in several eta regions
       const int numP05 = applyProjection<ChargedFinalState>(event, "CFS05").size();
       const int numP15 = applyProjection<ChargedFinalState>(event, "CFS15").size();
       const int numP30 = applyProjection<ChargedFinalState>(event, "CFS30").size();
       const int numP50 = applyProjection<ChargedFinalState>(event, "CFS50").size();
-      
+   
       // Fill histograms
       if (fuzzyEquals(sqrtS/GeV, 200.0, 1E-4)) {
         _hist_nch200->fill(numP50, weight);
@@ -82,9 +82,9 @@
         _hist_mean_nch_900->fill(_hist_mean_nch_900->binMean(0), numP50);
       }
     }
-    
-    
-    
+ 
+ 
+ 
     void finalize() {
       // Normalise to area of refhistos
       /// @todo Use generator cross-sections
@@ -101,7 +101,7 @@
       // Scale to total number of weights
       scale(_hist_mean_nch_200, 1.0/sumOfWeights());
       scale(_hist_mean_nch_900, 1.0/sumOfWeights());
-      
+   
       // Print trigger statistics
       getLog() << Log::INFO << "No. events vetoed: " << _numVetoed << endl;
       getLog() << Log::INFO << "No. events accepted: " << sumOfWeights() - _numVetoed << endl;
@@ -112,7 +112,7 @@
 
 
   private:
-    
+ 
     //@{
     /// Histograms
     AIDA::IHistogram1D* _hist_nch200;
@@ -132,7 +132,7 @@
     unsigned int _numVetoed;
   };
 
-  
+
 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<UA5_1989_S1926373> plugin_UA5_1989_S1926373;

Modified: trunk/src/Analyses/ZEUS_2001_S4815815.cc
==============================================================================
--- trunk/src/Analyses/ZEUS_2001_S4815815.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analyses/ZEUS_2001_S4815815.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,16 +9,16 @@
 
   /// @brief ZEUS dijet photoproduction study used in the ZEUS Jets PDF fit
   ///
-  /// This class is a reproduction of the HZTool routine for the ZEUS 
-  /// dijet photoproduction paper which was used in the ZEUS Jets PDF fit.  
+  /// This class is a reproduction of the HZTool routine for the ZEUS
+  /// dijet photoproduction paper which was used in the ZEUS Jets PDF fit.
   ///
   /// @author Jon Butterworth
   class ZEUS_2001_S4815815 : public Analysis {
   public:
 
     /// Constructor
-    ZEUS_2001_S4815815() : Analysis("ZEUS_2001_S4815815") 
-    { 
+    ZEUS_2001_S4815815() : Analysis("ZEUS_2001_S4815815")
+    {
       setBeams(POSITRON, PROTON);
     }
 
@@ -32,7 +32,7 @@
       addProjection(fs, "FS");
       /// @todo This is the *wrong* jet def: correct it!
       addProjection(FastJets(fs, FastJets::KT, 0.7), "Jets");
-      getLog() << Log::WARN << "This analysis uses the wrong jet definition: the " 
+      getLog() << Log::WARN << "This analysis uses the wrong jet definition: the "
                << "paper just says 'a cone algorithm was applied to the CAL cells and jets "
                << "were reconstructed using the energies and positions of these cells'" << endl;
 
@@ -50,10 +50,10 @@
         _histJetEt1->fill(j.momentum().pT(), weight);
       }
     }
-    
-    
+ 
+ 
     // Finalize
-    void finalize() { 
+    void finalize() {
       //
     }
 
@@ -69,8 +69,8 @@
 
   };
 
-    
-    
+ 
+ 
   // This global object acts as a hook for the plugin system
   AnalysisBuilder<ZEUS_2001_S4815815> plugin_ZEUS_2001_S4815815;
 

Modified: trunk/src/Analysis.cc
==============================================================================
--- trunk/src/Analysis.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Analysis.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,7 +11,7 @@
 namespace Rivet {
 
 
-  Analysis::Analysis(const string& name) 
+  Analysis::Analysis(const string& name)
     : _gotCrossSection(false),
       _needsCrossSection(false),
       _analysishandler(0),
@@ -23,10 +23,10 @@
     setBeams(ANY, ANY);
   }
 
-  
+
   Analysis::~Analysis()
   {  }
-  
+
 
   IAnalysisFactory& Analysis::analysisFactory() {
     return handler().analysisFactory();
@@ -72,13 +72,13 @@
   }
 
 
-  size_t Analysis::numEvents() const { 
-    return handler().numEvents(); 
+  size_t Analysis::numEvents() const {
+    return handler().numEvents();
   }
 
 
-  double Analysis::sumOfWeights() const { 
-    return handler().sumOfWeights(); 
+  double Analysis::sumOfWeights() const {
+    return handler().sumOfWeights();
   }
 
 
@@ -89,7 +89,7 @@
     if (_info && !_info->name().empty()) return _info->name();
     return _defaultname;
   }
-  
+
   std::string Analysis::spiresId() const {
     if (!_info) return "NONE";
     return _info->spiresId();
@@ -99,7 +99,7 @@
     if (!_info) return std::vector<std::string>();
     return _info->authors();
   }
-  
+
   std::string Analysis::summary() const {
     if (!_info) return "NONE";
     return _info->summary();
@@ -114,17 +114,17 @@
     if (!_info) return "NONE";
     return _info->runInfo();
   }
-  
+
   std::string Analysis::experiment() const {
     if (!_info) return "NONE";
     return _info->experiment();
   }
-  
+
   std::string Analysis::collider() const {
     if (!_info) return "NONE";
     return _info->collider();
   }
-  
+
   const BeamPair& Analysis::beams() const {
     return _beams;
   }
@@ -133,7 +133,7 @@
     if (!_info) return "NONE";
     return _info->year();
   }
-  
+
   std::vector<std::string> Analysis::references() const {
     if (!_info) return std::vector<std::string>();
     return _info->references();
@@ -143,11 +143,11 @@
     if (!_info) return "UNVALIDATED";
     return _info->status();
   }
-  
+
   const BeamPair& Analysis::requiredBeams() const {
     return _beams;
   }
-  
+
   Analysis& Analysis::setBeams(const ParticleName& beam1, const ParticleName& beam2) {
     _beams.first = beam1;
     _beams.second = beam2;
@@ -157,31 +157,31 @@
   bool Analysis::isCompatible(const ParticleName& beam1, const ParticleName& beam2) const {
     BeamPair beams(beam1, beam2);
     return compatible(beams, requiredBeams());
-    /// @todo Need to also check internal consistency of the analysis' 
+    /// @todo Need to also check internal consistency of the analysis'
     /// beam requirements with those of the projections it uses.
   }
-  
+
   bool Analysis::isCompatible(const BeamPair& beams) const {
     return compatible(beams, requiredBeams());
-    /// @todo Need to also check internal consistency of the analysis' 
+    /// @todo Need to also check internal consistency of the analysis'
     /// beam requirements with those of the projections it uses.
   }
-  
+
   Analysis& Analysis::setCrossSection(const double& xs) {
     _crossSection = xs;
     _gotCrossSection = true;
     return *this;
   }
-  
+
   bool Analysis::needsCrossSection() const {
     return _needsCrossSection;
   }
-  
+
   Analysis& Analysis::setNeedsCrossSection(bool needed) {
     _needsCrossSection = needed;
     return *this;
   }
-  
+
   const double& Analysis::crossSection() const {
     if (!_gotCrossSection) {
       string errMsg = "You did not set the cross section for the analysis " + name();
@@ -189,7 +189,7 @@
     }
     return _crossSection;
   }
-  
+
   AnalysisHandler& Analysis::handler() const {
     return *_analysishandler;
   }
@@ -232,9 +232,9 @@
   }
 
 
-  IHistogram1D* Analysis::bookHistogram1D(const size_t datasetId, const size_t xAxisId, 
+  IHistogram1D* Analysis::bookHistogram1D(const size_t datasetId, const size_t xAxisId,
                                           const size_t yAxisId, const string& title,
-                                          const string& xtitle, const string& ytitle) 
+                                          const string& xtitle, const string& ytitle)
   {
     const string axisCode = _makeAxisCode(datasetId, xAxisId, yAxisId);
     return bookHistogram1D(axisCode, title, xtitle, ytitle);
@@ -260,7 +260,7 @@
 
   IHistogram1D* Analysis::bookHistogram1D(const string& hname,
                                           const size_t nbins, const double lower, const double upper,
-                                          const string& title, 
+                                          const string& title,
                                           const string& xtitle, const string& ytitle) {
     _makeHistoDir();
     const string path = histoPath(hname);
@@ -274,7 +274,7 @@
 
   IHistogram1D* Analysis::bookHistogram1D(const string& hname,
                                           const vector<double>& binedges,
-                                          const string& title, 
+                                          const string& title,
                                           const string& xtitle, const string& ytitle) {
     _makeHistoDir();
     const string path = histoPath(hname);
@@ -289,7 +289,7 @@
   /////////////////
 
 
-  IProfile1D* Analysis::bookProfile1D(const size_t datasetId, const size_t xAxisId, 
+  IProfile1D* Analysis::bookProfile1D(const size_t datasetId, const size_t xAxisId,
                                       const size_t yAxisId, const string& title,
                                       const string& xtitle, const string& ytitle) {
     const string axisCode = _makeAxisCode(datasetId, xAxisId, yAxisId);
@@ -298,7 +298,7 @@
 
 
   IProfile1D* Analysis::bookProfile1D(const std::string& hname, const std::string& title,
-                                      const string& xtitle, const string& ytitle) 
+                                      const string& xtitle, const string& ytitle)
   {
     // Get the bin edges (only read the AIDA file once)
     _cacheBinEdges();
@@ -316,35 +316,35 @@
     IProfile1D* prof = histogramFactory().createProfile1D(path, title, edges);
     getLog() << Log::TRACE << "Made profile histogram " << hname <<  " for " << name() << endl;
     prof->setXTitle(xtitle);
-    prof->setYTitle(ytitle);    
+    prof->setYTitle(ytitle);
     return prof;
   }
 
 
   IProfile1D* Analysis::bookProfile1D(const string& hname,
                                       const size_t nbins, const double lower, const double upper,
-                                      const string& title, 
+                                      const string& title,
                                       const string& xtitle, const string& ytitle) {
     _makeHistoDir();
     const string path = histoPath(hname);
     IProfile1D* prof = histogramFactory().createProfile1D(path, title, nbins, lower, upper);
     getLog() << Log::TRACE << "Made profile histogram " << hname <<  " for " << name() << endl;
     prof->setXTitle(xtitle);
-    prof->setYTitle(ytitle);    
+    prof->setYTitle(ytitle);
     return prof;
   }
 
 
   IProfile1D* Analysis::bookProfile1D(const string& hname,
                                       const vector<double>& binedges,
-                                      const string& title, 
+                                      const string& title,
                                       const string& xtitle, const string& ytitle) {
     _makeHistoDir();
     const string path = histoPath(hname);
     IProfile1D* prof = histogramFactory().createProfile1D(path, title, binedges);
     getLog() << Log::TRACE << "Made profile histogram " << hname <<  " for " << name() << endl;
     prof->setXTitle(xtitle);
-    prof->setYTitle(ytitle);    
+    prof->setYTitle(ytitle);
     return prof;
   }
 
@@ -360,7 +360,7 @@
     IDataPointSet* dps = datapointsetFactory().create(path, title, 2);
     getLog() << Log::TRACE << "Made data point set " << hname <<  " for " << name() << endl;
     dps->setXTitle(xtitle);
-    dps->setYTitle(ytitle); 
+    dps->setYTitle(ytitle);
     return dps;
   }
 
@@ -383,7 +383,7 @@
   }
 
 
-  IDataPointSet* Analysis::bookDataPointSet(const size_t datasetId, const size_t xAxisId, 
+  IDataPointSet* Analysis::bookDataPointSet(const size_t datasetId, const size_t xAxisId,
                                             const size_t yAxisId, const string& title,
                                             const string& xtitle, const string& ytitle) {
     // Get the bin edges (only read the AIDA file once)
@@ -433,7 +433,7 @@
     }
     const string hpath = tree().findPath(dynamic_cast<const AIDA::IManagedObject&>(*histo));
     getLog() << Log::TRACE << "Normalizing histo " << hpath << " to " << norm << endl;
-    
+ 
     double oldintg = 0.0;
     int nBins = histo->axis().bins();
     for (int iBin = 0; iBin != nBins; ++iBin) {
@@ -444,7 +444,7 @@
       getLog() << Log::WARN << "Histo " << hpath << " has null integral during normalisation" << endl;
       return;
     }
-  
+
     // Scale by the normalisation factor.
     scale(histo, norm/oldintg);
   }
@@ -458,7 +458,7 @@
     }
     const string hpath = tree().findPath(dynamic_cast<const AIDA::IManagedObject&>(*histo));
     getLog() << Log::TRACE << "Scaling histo " << hpath << endl;
-    
+ 
     std::vector<double> x, y, ex, ey;
     for (size_t i = 0, N = histo->axis().bins(); i < N; ++i) {
       x.push_back(0.5 * (histo->axis().binLowerEdge(i) + histo->axis().binUpperEdge(i)));
@@ -472,24 +472,24 @@
       // We'd like to do this: ey.push_back(histo->binError(i) * scale);
       ey.push_back(histo->binError(i)*scale/(0.5*histo->axis().binWidth(i)));
     }
-    
+ 
     std::string title = histo->title();
     std::string xtitle = histo->xtitle();
     std::string ytitle = histo->ytitle();
 
     tree().mkdir("/tmpnormalize");
     tree().mv(hpath, "/tmpnormalize");
-    
+ 
     AIDA::IDataPointSet* dps = datapointsetFactory().createXY(hpath, title, x, y, ex, ey);
     dps->setXTitle(xtitle);
     dps->setYTitle(ytitle);
-    
+ 
     tree().rm(tree().findPath(dynamic_cast<AIDA::IManagedObject&>(*histo)));
     tree().rmdir("/tmpnormalize");
-    
+ 
     // Set histo pointer to null - it can no longer be used.
     histo = 0;
   }
-  
-  
+
+
 }

Modified: trunk/src/AnalysisHandler.cc
==============================================================================
--- trunk/src/AnalysisHandler.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/AnalysisHandler.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,7 +10,7 @@
 namespace Rivet {
 
 
-  AnalysisHandler::AnalysisHandler(string basefilename, 
+  AnalysisHandler::AnalysisHandler(string basefilename,
                                    string runname, HistoFormat storetype)
     : _runname(runname), _nRun(0), _iRun(0), _numEvents(0), _sumOfWeights(0.0) {
     _theAnalysisFactory = createAnalysisFactory();
@@ -18,20 +18,20 @@
   }
 
 
-  AnalysisHandler::AnalysisHandler(IAnalysisFactory& afac, string basefilename, 
+  AnalysisHandler::AnalysisHandler(IAnalysisFactory& afac, string basefilename,
                                    string runname, HistoFormat storetype)
-    : _runname(runname), _nRun(0), _iRun(0), _numEvents(0), _sumOfWeights(0.0), 
+    : _runname(runname), _nRun(0), _iRun(0), _numEvents(0), _sumOfWeights(0.0),
       _theAnalysisFactory(&afac) {
     _setupFactories(basefilename, storetype);
   }
-  
-  
+
+
   AnalysisHandler::~AnalysisHandler()
   {
   }
 
 
-  Log& AnalysisHandler::getLog() { 
+  Log& AnalysisHandler::getLog() {
     return Log::getLog("Rivet.Analysis.Handler");
   }
 
@@ -53,7 +53,7 @@
     }
     getLog() << Log::DEBUG << "Analysis handler initialised" << endl;
   }
-  
+
 
   void AnalysisHandler::analyze(const GenEvent& ge) {
     Event event(ge);
@@ -141,7 +141,7 @@
       }
     }
     foreach (Analysis* a, todelete) {
-      getLog() << Log::WARN << "Removing incompatible analysis '" 
+      getLog() << Log::WARN << "Removing incompatible analysis '"
                << a->name() << "'" << endl;
       _analyses.erase(a);
       delete a;
@@ -171,7 +171,7 @@
   void AnalysisHandler::commitData() {
     tree().commit();
   }
-  
+
 
   void AnalysisHandler::_normalizeTree(ITree& tree) {
     Log& log = getLog();
@@ -180,7 +180,7 @@
     const string tmpdir = "/RivetNormalizeTmp";
     tree.mkdir(tmpdir);
     foreach (const string& path, paths) {
-      
+   
       IManagedObject* hobj = tree.find(path);
       if (hobj) {
         IHistogram1D* histo = dynamic_cast<IHistogram1D*>(hobj);
@@ -215,21 +215,21 @@
         }
 
       }
-      
+   
     }
     tree.rmdir(tmpdir);
   }
-  
-  
+
+
   string AnalysisHandler::runName() const { return _runname; }
   size_t AnalysisHandler::numEvents() const { return _numEvents; }
   double AnalysisHandler::sumOfWeights() const { return _sumOfWeights; }
-  
+
   void AnalysisHandler::setSumOfWeights(const double& sum) {
     _sumOfWeights=sum;
   }
-  
-  
+
+
   std::vector<std::string> AnalysisHandler::analysisNames() {
     std::vector<std::string> rtn;
     foreach (Analysis* a, _analyses) {
@@ -237,8 +237,8 @@
     }
     return rtn;
   }
-  
-  
+
+
   AnalysisHandler& AnalysisHandler::addAnalyses(const std::vector<std::string>& analysisnames) {
     foreach (const string& aname, analysisnames) {
       //getLog() << Log::DEBUG << "Adding analysis '" << aname << "'" << endl;
@@ -246,37 +246,37 @@
     }
     return *this;
   }
-  
-  
+
+
   AnalysisHandler& AnalysisHandler::removeAnalyses(const std::vector<std::string>& analysisnames) {
     foreach (const string& aname, analysisnames) {
       removeAnalysis(aname);
     }
     return *this;
   }
-  
-  
-  
+
+
+
   AIDA::IAnalysisFactory& AnalysisHandler::analysisFactory() {
     return *_theAnalysisFactory;
   }
-  
-  
+
+
   AIDA::ITree& AnalysisHandler::tree() {
     return *_theTree;
   }
-  
-  
+
+
   AIDA::IHistogramFactory& AnalysisHandler::histogramFactory() {
     return *_theHistogramFactory;
   }
-  
-  
+
+
   AIDA::IDataPointSetFactory& AnalysisHandler::datapointsetFactory() {
     return *_theDataPointSetFactory;
   }
-  
-  
+
+
   bool AnalysisHandler::needCrossSection() const {
     bool rtn = false;
     foreach (const Analysis* a, _analyses) {
@@ -285,8 +285,8 @@
     }
     return rtn;
   }
-  
-  
+
+
   AnalysisHandler& AnalysisHandler::setCrossSection(double xs) {
     foreach (Analysis* a, _analyses) {
       a->setCrossSection(xs);

Modified: trunk/src/AnalysisInfo.cc
==============================================================================
--- trunk/src/AnalysisInfo.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/AnalysisInfo.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,11 +10,11 @@
 namespace Rivet {
 
 
-  /// Ideas: 
+  /// Ideas:
   ///  * search RIVET_DATA_PATH etc. for <name>.info.yaml
   ///  * how to determine the name?
   ///  * only populate pointer on Analysis when requested
-  ///  * use smart pointer: deletes automatically when Analysis 
+  ///  * use smart pointer: deletes automatically when Analysis
   ///    goes out of scope
 
 
@@ -37,7 +37,7 @@
       if (d.empty()) continue;
       /// @todo Use system-independent separator (e.g. Boost.File)
       datapath = d + "/" + ananame + ".info";
-      Log::getLog("Rivet.AnalysisInfo") 
+      Log::getLog("Rivet.AnalysisInfo")
         << Log::TRACE << "Looking for analysis data file '" << datapath << "'" << endl;
       if (access(datapath.c_str(), R_OK) == 0) {
         found = true;
@@ -57,8 +57,8 @@
       parser.GetNextDocument(doc);
       //cout << doc << endl;
     } catch (const YAML::ParserException& ex) {
-      Log::getLog("Rivet.AnalysisInfo") 
-        << Log::ERROR << "Parse error when reading analysis data from " 
+      Log::getLog("Rivet.AnalysisInfo")
+        << Log::ERROR << "Parse error when reading analysis data from "
         << datapath << endl;
       return 0;
     }
@@ -70,7 +70,7 @@
       stringstream sec;
       sec << it.second();
       const string secstr = sec.str().substr(0, sec.str().length()-1);
-      Log::getLog("Rivet.AnalysisInfo") 
+      Log::getLog("Rivet.AnalysisInfo")
         << Log::TRACE << key << ": " << secstr << endl;
       try {
         if (key == "Name") {
@@ -115,7 +115,7 @@
           << Log::WARN << "Type error when reading analysis data '"
           << key << "' from " << datapath << endl;
       }
-    }  
+    }
     //cout << *ai << endl;
     return ai;
   }

Modified: trunk/src/AnalysisLoader.cc
==============================================================================
--- trunk/src/AnalysisLoader.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/AnalysisLoader.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -21,7 +21,7 @@
   }
 
 
-  Analysis* AnalysisLoader::getAnalysis(const string& analysisname) { 
+  Analysis* AnalysisLoader::getAnalysis(const string& analysisname) {
     _loadAnalysisPlugins();
     AnalysisBuilderMap::const_iterator ai = _ptrs.find(analysisname);
     if (ai == _ptrs.end()) return 0;
@@ -51,7 +51,7 @@
     Log::getLog("Rivet.AnalysisLoader") << Log::TRACE << "Registering a plugin analysis called '" << name << "'" << endl;
     _ptrs[name] = ab;
   }
-  
+
 
   void AnalysisLoader::_loadAnalysisPlugins() {
     // Only run once
@@ -64,7 +64,7 @@
     env = getenv("RIVET_ANALYSIS_PATH");
     if (env) dirs += split(env);
     // Then the Rivet library install path
-    dirs += getLibPath();    
+    dirs += getLibPath();
     // And then the user's (non-system) library path
     env = getenv("LD_LIBRARY_PATH");
     if (env) dirs += split(env);

Modified: trunk/src/Cuts.cc
==============================================================================
--- trunk/src/Cuts.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Cuts.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -38,7 +38,7 @@
 
 
 
-  bool Cuts::checkConsistency() const { 
+  bool Cuts::checkConsistency() const {
     for (Cuts::const_iterator c = begin(); c != end(); ++c) {
       if (c->second.getLowerThan() < c->second.getLowerThan()) {
         ostringstream msg;

Modified: trunk/src/Event.cc
==============================================================================
--- trunk/src/Event.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Event.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -28,9 +28,9 @@
   }
 
 
-  // Convert the GenEvent to use conventional alignment 
+  // Convert the GenEvent to use conventional alignment
   // (proton or electron on +ve z-axis?)
-  // For example, FHerwig only produces DIS events in the 
+  // For example, FHerwig only produces DIS events in the
   // unconventional orientation and has to be corrected
   void _geNormAlignment(GenEvent& ge) {
     if (!ge.valid_beam_particles()) return;
@@ -50,7 +50,7 @@
     if (bps.first->pdg_id() != PROTON || bps.second->pdg_id() != PROTON) {
       if (bps.first->pdg_id() == PROTON) {
         plusgp = bps.first;
-      } else if (bps.second->pdg_id() == PROTON) { 
+      } else if (bps.second->pdg_id() == PROTON) {
         plusgp = bps.second;
       }
       if (plusgp && plusgp->momentum().pz() < 0) {
@@ -72,7 +72,7 @@
 
 
   Event::Event(const GenEvent& ge)
-    : _genEvent(ge), _weight(1.0) 
+    : _genEvent(ge), _weight(1.0)
   {
     // Set the weight if there is one, otherwise default to 1.0
     if (!_genEvent.weights().empty()) {
@@ -81,7 +81,7 @@
 
     // Use Rivet's preferred units if possible
     _geNormUnits(_genEvent);
-    
+ 
     // Use the conventional alignment
     _geNormAlignment(_genEvent);
 
@@ -91,9 +91,9 @@
 
 
   Event::Event(const Event& e)
-    : _genEvent(e._genEvent), 
-      _weight(e._weight) 
-  { 
+    : _genEvent(e._genEvent),
+      _weight(e._weight)
+  {
     //
   }
 

Modified: trunk/src/HistoHandler.cc
==============================================================================
--- trunk/src/HistoHandler.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/HistoHandler.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -15,8 +15,8 @@
   HistoHandler* HistoHandler::create() {
     if (!_instance) {
       _instance = new HistoHandler();
-      Log::getLog("Rivet.HistoHandler") 
-        << Log::TRACE << "Created new HistoHandler at " 
+      Log::getLog("Rivet.HistoHandler")
+        << Log::TRACE << "Created new HistoHandler at "
         << _instance << endl;
     }
     return _instance;
@@ -33,21 +33,21 @@
     _namedhistos.clear();
   }
 
-  
+
   // Delete contained pointers.
   HistoHandler::~HistoHandler() {
     clear();
   }
 
 
-  const AnalysisObject* HistoHandler::registerAnalysisObject(const Analysis& parent, 
-                                                             const AnalysisObject& ao, 
+  const AnalysisObject* HistoHandler::registerAnalysisObject(const Analysis& parent,
+                                                             const AnalysisObject& ao,
                                                              const string& name) {
     getLog() << Log::TRACE << "Trying to register"
              << " analysis object " << &ao
              << " for parent " << &parent << "(" << parent.name() << ")"
              << " with name '" << name << "'" << endl;
-    
+ 
     // If this name is already registered for this analysis, throw a complaint
     NamedHistosMap::const_iterator nhs = _namedhistos.find(&parent);
     if (nhs != _namedhistos.end()) {
@@ -69,7 +69,7 @@
 
   AnalysisObject* HistoHandler::_getAnalysisObject(const Analysis& parent,
                                                          const string& name) const {
-    getLog() << Log::TRACE << "Searching for child histo '" 
+    getLog() << Log::TRACE << "Searching for child histo '"
              << name << "' of " << &parent << endl;
 
     NamedHistosMap::const_iterator nhs = _namedhistos.find(&parent);

Modified: trunk/src/Jet.cc
==============================================================================
--- trunk/src/Jet.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Jet.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,7 +6,7 @@
 namespace Rivet {
 
 
-  Jet::Jet() 
+  Jet::Jet()
     : ParticleBase()
   {
     clear();
@@ -33,7 +33,7 @@
     _resetCaches();
     return *this;
   }
-    
+ 
 
   bool Jet::containsParticle(const Particle& particle) const {
     const int barcode = particle.genParticle().barcode();
@@ -150,7 +150,7 @@
     return momentum().eta();
 
   }
-  
+
 
   double Jet::phi() const {
     return momentum().phi();
@@ -162,13 +162,13 @@
     return _momentum;
   }
 
-    
-  // FourMomentum& Jet::momentum() { 
+ 
+  // FourMomentum& Jet::momentum() {
   //   _calcMomVector();
   //   return _momentum;
   // }
 
-    
+ 
   double Jet::ptSum() const {
     return momentum().pT();
   }
@@ -184,8 +184,8 @@
     _okPtWeightedEta = false;
     _okMomentum = false;
   }
-  
-  
+
+
   void Jet::_calcMomVector() const {
     if (!_okMomentum) {
       _momentum = accumulate(begin(), end(), FourMomentum());
@@ -210,6 +210,6 @@
       _okPtWeightedPhi = true;
     }
   }
-  
-  
+
+
 }

Modified: trunk/src/Makefile.am
==============================================================================
--- trunk/src/Makefile.am	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Makefile.am	Thu Nov 19 15:02:51 2009	(r2080)
@@ -1,4 +1,4 @@
-SUBDIRS = Projections 
+SUBDIRS = Projections
 if ENABLE_ANALYSES
 SUBDIRS += Analyses
 endif
@@ -10,7 +10,7 @@
   Event.cc Jet.cc \
   ProjectionApplier.cc Projection.cc \
   Analysis.cc AnalysisLoader.cc AnalysisInfo.cc \
-  AnalysisHandler.cc Run.cc ProjectionHandler.cc HistoHandler.cc 
+  AnalysisHandler.cc Run.cc ProjectionHandler.cc HistoHandler.cc
 
 libRivet_la_CPPFLAGS = $(AM_CPPFLAGS) -I$(top_srcdir)/src/Tools/yaml-cpp $(CPPFLAGS)
 

Modified: trunk/src/Projection.cc
==============================================================================
--- trunk/src/Projection.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projection.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -34,8 +34,8 @@
       return thisid.before(otherid);
     }
   }
-  
-  
+
+
   const set<BeamPair> Projection::beamPairs() const {
     set<BeamPair> ret = _beamPairs;
     set<ConstProjectionPtr> projs = getProjections();
@@ -48,13 +48,13 @@
   }
 
 
-  Cmp<Projection> Projection::mkNamedPCmp(const Projection& otherparent, 
+  Cmp<Projection> Projection::mkNamedPCmp(const Projection& otherparent,
                                           const string& pname) const {
     return pcmp(*this, otherparent, pname);
   }
 
 
-  Cmp<Projection> Projection::mkPCmp(const Projection& otherparent, 
+  Cmp<Projection> Projection::mkPCmp(const Projection& otherparent,
                                      const string& pname) const {
     return pcmp(*this, otherparent, pname);
   }

Modified: trunk/src/ProjectionApplier.cc
==============================================================================
--- trunk/src/ProjectionApplier.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/ProjectionApplier.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,7 +6,7 @@
 namespace Rivet {
 
 
-  // NB. Allow proj registration in constructor by default -- explicitly disable for Analysis 
+  // NB. Allow proj registration in constructor by default -- explicitly disable for Analysis
   ProjectionApplier::ProjectionApplier()
     : _allowProjReg(true),
       _projhandler(ProjectionHandler::create())
@@ -18,22 +18,22 @@
   }
 
 
-  const Projection& ProjectionApplier::_applyProjection(const Event& evt, 
+  const Projection& ProjectionApplier::_applyProjection(const Event& evt,
                                                         const string& name) const {
     return evt.applyProjection(getProjection(name));
   }
-  
 
-  const Projection& ProjectionApplier::_applyProjection(const Event& evt, 
+
+  const Projection& ProjectionApplier::_applyProjection(const Event& evt,
                                                         const Projection& proj) const {
     return evt.applyProjection(proj);
   }
 
 
-  const Projection& ProjectionApplier::_addProjection(const Projection& proj, 
+  const Projection& ProjectionApplier::_addProjection(const Projection& proj,
                                                       const std::string& name) {
     if (!_allowProjReg) {
-      getLog() << Log::ERROR << "Trying to register projection '" 
+      getLog() << Log::ERROR << "Trying to register projection '"
                << proj.name() << "' before init phase in '" << this->name() << "'." << endl;
       exit(2);
     }

Modified: trunk/src/ProjectionHandler.cc
==============================================================================
--- trunk/src/ProjectionHandler.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/ProjectionHandler.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -39,7 +39,7 @@
   }
 
 
-  
+
   // Delete contained pointers.
   ProjectionHandler::~ProjectionHandler() {
     clear();
@@ -50,8 +50,8 @@
   // Take a Projection, compare it to the others on record, and return (by
   // reference) an equivalent Projection which is guaranteed to be the
   // (persistent) version that will be applied to an event.
-  const Projection& ProjectionHandler::registerProjection(const ProjectionApplier& parent, 
-                                                          const Projection& proj, 
+  const Projection& ProjectionHandler::registerProjection(const ProjectionApplier& parent,
+                                                          const Projection& proj,
                                                           const string& name)
   {
     getLog() << Log::TRACE << "Trying to register"
@@ -81,8 +81,8 @@
 
 
   // Attach and retrieve a projection as a pointer.
-  const Projection* ProjectionHandler::registerProjection(const ProjectionApplier& parent, 
-                                                          const Projection* proj, 
+  const Projection* ProjectionHandler::registerProjection(const ProjectionApplier& parent,
+                                                          const Projection* proj,
                                                           const string& name) {
     if (proj == 0) return 0;
     const Projection& p = registerProjection(parent, *proj, name);
@@ -92,20 +92,20 @@
 
 
   // Clone neatly
-  const Projection* ProjectionHandler::_clone(const ProjectionApplier& parent, 
+  const Projection* ProjectionHandler::_clone(const ProjectionApplier& parent,
                                               const Projection& proj)
   {
     // Clone a new copy of the passed projection on the heap
     getLog() << Log::TRACE << "Cloning projection " << proj.name() << " from " << &proj << endl;
     const Projection* newproj = proj.clone();
     getLog() << Log::TRACE << "Cloned projection " << proj.name() << " at " << newproj << endl;
-    
-    // Copy all the child ProjHandles when cloning, since otherwise links to "stack parents" 
+ 
+    // Copy all the child ProjHandles when cloning, since otherwise links to "stack parents"
     // will be generated by their children, without any connection to the cloned parent
     if (&proj != newproj) {
       NamedProjsMap::const_iterator nps = _namedprojs.find(&proj);
       if (nps != _namedprojs.end()) {
-        getLog() << Log::TRACE << "Cloning registered projections list: " 
+        getLog() << Log::TRACE << "Cloning registered projections list: "
                  << &proj << " -> " << newproj << endl;
         _namedprojs[newproj] = nps->second;
       }
@@ -119,8 +119,8 @@
   // Take a Projection, compare it to the others on record, and
   // return (by reference) an equivalent Projection which is guaranteed to be
   // the version that will be applied to an event.
-  const Projection* ProjectionHandler::_register(const ProjectionApplier& parent, 
-                                                 const Projection& proj, 
+  const Projection* ProjectionHandler::_register(const ProjectionApplier& parent,
+                                                 const Projection& proj,
                                                  const string& name)
   {
     ProjHandle ph(&proj);
@@ -143,20 +143,20 @@
   {
     // Get class type using RTTI
     const std::type_info& newtype = typeid(proj);
-    getLog() << Log::TRACE << "RTTI type of " << &proj << " is " << newtype.name() << endl; 
+    getLog() << Log::TRACE << "RTTI type of " << &proj << " is " << newtype.name() << endl;
 
     // Compare to ALL projections via _projs collection
-    getLog() << Log::TRACE << "Comparing " << &proj 
+    getLog() << Log::TRACE << "Comparing " << &proj
              << " with " << _projs.size()
              << " registered projection" << (_projs.size() == 1 ? "" : "s") <<  endl;
     foreach (const ProjHandle& ph, _projs) {
       // Make sure the concrete types match, using RTTI.
       const std::type_info& regtype = typeid(*ph);
-      getLog() << Log::TRACE << "RTTI type comparison with " << ph << ": " 
-               << newtype.name() << " vs. " << regtype.name() << endl; 
+      getLog() << Log::TRACE << "RTTI type comparison with " << ph << ": "
+               << newtype.name() << " vs. " << regtype.name() << endl;
       if (newtype != regtype) continue;
       getLog() << Log::TRACE << "RTTI type matches with " << ph << endl;
-      
+   
       // Test for semantic match
       if (pcmp(*ph, proj) != EQUIVALENT) {
         getLog() << Log::TRACE << "Projections at "
@@ -181,7 +181,7 @@
       //const string parentname = nps.first->name();
       msg << nps.first << endl; //"(" << parentname << ")" << endl;
       foreach (const NamedProjs::value_type& np, nps.second) {
-        msg << "  " << np.second << " (" << np.second->name() 
+        msg << "  " << np.second << " (" << np.second->name()
             << ", locally called '" << np.first << "')" << endl;
       }
       msg << endl;
@@ -192,9 +192,9 @@
 
 
   // Check that the same parent hasn't already used this name for something else
-  bool ProjectionHandler::_checkDuplicate(const ProjectionApplier& parent, 
+  bool ProjectionHandler::_checkDuplicate(const ProjectionApplier& parent,
                                           const Projection& proj,
-                                          const string& name) const 
+                                          const string& name) const
   {
     NamedProjsMap::const_iterator ipnps = _namedprojs.find(&parent);
     if (ipnps != _namedprojs.end()) {
@@ -204,8 +204,8 @@
         const ProjHandle pph = ipph->second;
         getLog() << Log::ERROR << "Projection clash! "
                  << parent.name() << " (" << &parent << ") "
-                 << "is trying to overwrite its registered '" << name << "' " 
-                 << "projection (" << pph << "=" 
+                 << "is trying to overwrite its registered '" << name << "' "
+                 << "projection (" << pph << "="
                  << pph->name() << ") with a non-equivalent projection "
                  << "(" << &proj << "=" << proj.name() << ")" << endl;
         getLog() << Log::ERROR << _getStatus();
@@ -234,7 +234,7 @@
 
 
   set<const Projection*> ProjectionHandler::getChildProjections(const ProjectionApplier& parent,
-                                                                ProjDepth depth) const 
+                                                                ProjDepth depth) const
   {
     set<const Projection*> toplevel;
     NamedProjs nps = _namedprojs.find(&parent)->second;
@@ -260,7 +260,7 @@
 
   const Projection& ProjectionHandler::getProjection(const ProjectionApplier& parent,
                                                      const string& name) const {
-    getLog() << Log::TRACE << "Searching for child projection '" 
+    getLog() << Log::TRACE << "Searching for child projection '"
              << name << "' of " << &parent << endl;
     NamedProjsMap::const_iterator nps = _namedprojs.find(&parent);
     if (nps == _namedprojs.end()) {

Modified: trunk/src/Projections/Beam.cc
==============================================================================
--- trunk/src/Projections/Beam.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/Beam.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -8,14 +8,14 @@
 
   void Beam::project(const Event& e) {
     assert(e.genEvent().particles_size() >= 2);
-    std::pair<HepMC::GenParticle*, HepMC::GenParticle*> beams = 
+    std::pair<HepMC::GenParticle*, HepMC::GenParticle*> beams =
       e.genEvent().beam_particles();
     assert(beams.first);
     _theBeams.first = *(beams.first);
     assert(beams.second);
     _theBeams.second = *(beams.second);
 
-    getLog() << Log::DEBUG << "Beam particle IDs = " 
+    getLog() << Log::DEBUG << "Beam particle IDs = "
              << _theBeams.first.pdgId() << ", "
              << _theBeams.second.pdgId() << endl;
   }

Modified: trunk/src/Projections/ChargedFinalState.cc
==============================================================================
--- trunk/src/Projections/ChargedFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/ChargedFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -8,42 +8,42 @@
 namespace Rivet {
 
 
-  ChargedFinalState::ChargedFinalState(const FinalState& fsp) { 
+  ChargedFinalState::ChargedFinalState(const FinalState& fsp) {
     setName("ChargedFinalState");
     addProjection(fsp, "FS");
   }
-   
- 
-  ChargedFinalState::ChargedFinalState(double mineta, double maxeta, double minpt) { 
+
+
+  ChargedFinalState::ChargedFinalState(double mineta, double maxeta, double minpt) {
     setName("ChargedFinalState");
     addProjection(FinalState(mineta, maxeta, minpt), "FS");
   }
-  
+
 
   int ChargedFinalState::compare(const Projection& p) const {
     return mkNamedPCmp(p, "FS");
   }
 
-  
+
   bool chargedParticleFilter(const Particle& p) {
     return PID::threeCharge(p.pdgId()) == 0;
   }
 
-  
+
   void ChargedFinalState::project(const Event& e) {
     const FinalState& fs = applyProjection<FinalState>(e, "FS");
     _theParticles.clear();
-    std::remove_copy_if(fs.particles().begin(), fs.particles().end(), 
+    std::remove_copy_if(fs.particles().begin(), fs.particles().end(),
                         std::back_inserter(_theParticles), chargedParticleFilter);
-    getLog() << Log::DEBUG << "Number of charged final-state particles = " 
+    getLog() << Log::DEBUG << "Number of charged final-state particles = "
              << _theParticles.size() << endl;
     if (getLog().isActive(Log::TRACE)) {
       for (vector<Particle>::iterator p = _theParticles.begin(); p != _theParticles.end(); ++p) {
-        getLog() << Log::TRACE << "Selected: " << p->pdgId() 
+        getLog() << Log::TRACE << "Selected: " << p->pdgId()
                  << ", charge = " << PID::threeCharge(p->pdgId())/3.0 << endl;
       }
     }
-  } 
-  
+  }
+
 
 }

Modified: trunk/src/Projections/DISKinematics.cc
==============================================================================
--- trunk/src/Projections/DISKinematics.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/DISKinematics.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,7 +13,7 @@
     // Identify beam hadron
     const ParticlePair& inc = applyProjection<Beam>(e, "Beam").beams();
     bool firstIsHadron  = PID::isHadron(inc.first.pdgId());
-    bool secondIsHadron = PID::isHadron(inc.second.pdgId());    
+    bool secondIsHadron = PID::isHadron(inc.second.pdgId());
     if (firstIsHadron && !secondIsHadron) {
       _inHadron = inc.first;
     } else if (!firstIsHadron && secondIsHadron) {
@@ -72,7 +72,7 @@
 
   int DISKinematics::compare(const Projection & p) const {
     const DISKinematics& other = pcast<DISKinematics>(p);
-    return mkNamedPCmp(other, "Lepton"); 
+    return mkNamedPCmp(other, "Lepton");
   }
 
 

Modified: trunk/src/Projections/DISLepton.cc
==============================================================================
--- trunk/src/Projections/DISLepton.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/DISLepton.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,19 +9,19 @@
   int DISLepton::compare(const Projection& p) const {
     const DISLepton& other = pcast<DISLepton>(p);
     return
-      mkNamedPCmp(other, "Beam") || 
+      mkNamedPCmp(other, "Beam") ||
       mkNamedPCmp(other, "FS");
   }
 
 
   void DISLepton::project(const Event& e) {
     const ParticlePair& inc = applyProjection<Beam>(e, "Beam").beams();
-    
+ 
     Particle inLepton;
-    
+ 
     bool firstIsLepton = PID::isLepton(inc.first.pdgId());
     bool secondIsLepton = PID::isLepton(inc.second.pdgId());
-    
+ 
     if(firstIsLepton && !secondIsLepton){
       _incoming = inc.first;
     }else if(!firstIsLepton && secondIsLepton){
@@ -30,12 +30,12 @@
       //eek!
       throw	Error("DISLepton projector could not find the correct beam. ");
     }
-    
+ 
     _sign = (_incoming.momentum().pz() > 0.0)? 1.0: -1.0;
     long id = _incoming.pdgId();
-    
+ 
     double pzMax = -1000000000.0;
-    
+ 
     const FinalState& fs = applyProjection<FinalState>(e, "FS");
     foreach (const Particle& p, fs.particles()) {
       double pz = _sign * p.momentum().pz();
@@ -44,7 +44,7 @@
         pzMax = pz;
       }
     }
-    
+ 
     if (!_outgoing.hasGenParticle()) {
       throw Error("DISLepton projector could not find the scattered lepton.");
     }

Modified: trunk/src/Projections/FinalState.cc
==============================================================================
--- trunk/src/Projections/FinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/FinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,26 +13,26 @@
     const bool openpt = isZero(minpt);
     const bool openeta = (mineta <= -MAXRAPIDITY && maxeta >= MAXRAPIDITY);
     getLog() << Log::TRACE << "Check for open FS conditions:" << std::boolalpha
-             << " eta=" << openeta 
+             << " eta=" << openeta
              << ", pt=" << openpt << endl;
     if (!openeta || !openpt) {
-      addProjection(FinalState(), "OpenFS");    
+      addProjection(FinalState(), "OpenFS");
       if (!openeta) {
         _etaRanges.push_back(make_pair(mineta, maxeta));
       }
     }
   }
-  
-  
+
+
   FinalState::FinalState(const vector<pair<double, double> >& etaRanges, double minpt)
     : _etaRanges(etaRanges), _ptmin(minpt)
-  { 
+  {
     setName("FinalState");
     const bool openpt = isZero(minpt);
     /// @todo Properly check whether any of these eta ranges (or their combination) are actually open
     const bool openeta = etaRanges.empty();
     getLog() << Log::TRACE << "Check for open FS conditions:" << std::boolalpha
-             << " eta=" << openeta 
+             << " eta=" << openeta
              << ", pt=" << openpt << endl;
     if (!openeta || !openpt) {
       addProjection(FinalState(), "OpenFS");
@@ -65,7 +65,7 @@
 
     // Handle "open FS" special case
     if (_etaRanges.empty() && _ptmin == 0) {
-      getLog() << Log::TRACE << "Open FS processing: should only see this once per event (" 
+      getLog() << Log::TRACE << "Open FS processing: should only see this once per event ("
                << e.genEvent().event_number() << ")" << endl;
       foreach (const GenParticle* p, Rivet::particles(e.genEvent())) {
         if (p->status() == 1) {
@@ -83,14 +83,14 @@
       const bool passed = accept(p);
       if (getLog().isActive(Log::TRACE)) {
         getLog() << Log::TRACE
-                 << "Choosing: ID = " << p.pdgId() 
-                 << ", pT = " << p.momentum().pT() 
-                 << ", eta = " << p.momentum().eta() 
+                 << "Choosing: ID = " << p.pdgId()
+                 << ", pT = " << p.momentum().pT()
+                 << ", eta = " << p.momentum().eta()
                  << ": result = " << std::boolalpha << passed << endl;
       }
       if (passed) _theParticles.push_back(p);
     }
-    getLog() << Log::DEBUG << "Number of final-state particles = " 
+    getLog() << Log::DEBUG << "Number of final-state particles = "
              << _theParticles.size() << endl;
   }
 
@@ -106,7 +106,7 @@
       if (pT < _ptmin) return false;
     }
 
-    // Check eta cuts 
+    // Check eta cuts
     if (!_etaRanges.empty()) {
       bool eta_pass = false;
       const double eta = p.momentum().eta();
@@ -119,7 +119,7 @@
       }
       if (!eta_pass) return false;
     }
-    
+ 
     return true;
   }
 

Modified: trunk/src/Projections/FoxWolframMoments.cc
==============================================================================
--- trunk/src/Projections/FoxWolframMoments.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/FoxWolframMoments.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -4,63 +4,63 @@
 #include "Rivet/Cmp.hh"
 
 namespace Rivet {
-    
+ 
     int FoxWolframMoments::compare(const Projection& p) const {
         return mkNamedPCmp(p, "FS");
     }
-    
-    
+ 
+ 
     void FoxWolframMoments::project(const Event& e) {
         Log log = getLog();
-        
+     
         // Project into final state and get total visible momentum
         const FinalState& fs             = applyProjection<FinalState>(e, "VFS");
 
 /*        const FastJets &jetProjC4   = applyProjection<FastJets>(e, "JetsC4");
 		Jets theJetsC4  = jetProjC4.jetsByPt(20.0);
-        
+     
         Jets goodJetsC4;
 		foreach(const Jet& jet, theJetsC4)
 		{
             //const double jetphi = jet.momentum().azimuthalAngle(ZERO_2PI);
             //const double jeteta = jet.momentum().pseudorapidity();
             const double jpt    = jet.momentum().pT();
-            
+         
 			if( jpt > 20.0 && goodJetsC4.size() < 4 )// && fabs(jeteta) < 2.5 )
             {
-                goodJetsC4.push_back(jet);    
+                goodJetsC4.push_back(jet);
             }
 		}
-  */      
+  */
         // remember: # pairs = N! / ( r! * (N-r)! )
-        
+     
         // N.B.: Autocorrelations are included! Treat them separately as diagonal elements.
         // see: http://cepa.fnal.gov/psm/simulation/mcgen/lund/pythia_manual/pythia6.3/pythia6301/node215.html
-        
+     
         double sumEnergy = 0.0;
-        for (ParticleVector::const_iterator pi = fs.particles().begin(); pi != fs.particles().end(); ++pi) 
+        for (ParticleVector::const_iterator pi = fs.particles().begin(); pi != fs.particles().end(); ++pi)
         //for ( Jets::const_iterator pi = goodJetsC4.begin() ; pi != goodJetsC4.end() ; ++pi )
         {
             sumEnergy += pi->momentum().E();
 
             const FourMomentum pi_4 = pi->momentum();
 
-            for (ParticleVector::const_iterator pj = pi+1; pj != fs.particles().end(); ++pj) 
+            for (ParticleVector::const_iterator pj = pi+1; pj != fs.particles().end(); ++pj)
             //for ( Jets::const_iterator pj = pi + 1 ; pj != goodJetsC4.end() ; ++pj )
             {
                 if ( pi == pj ) continue;
-                
+             
                 const FourMomentum pj_4 = pj->momentum();
-                
+             
                 // Calculate x_ij = cos(theta_ij)
                 double x_ij = 1.0;
                 if ( pi != pj ) {
                     double denom =  pi_4.vector3().mod() * pj_4.vector3().mod();
                     x_ij = pi_4.vector3().dot( pj_4.vector3() ) / denom;
                 }
-                
+             
                 //log << Log::DEBUG << "x_ij = " << x_ij << endl;
-                
+             
                 //const double core = fabs( pi_4 * pj_4 ); //  / sumet2 ;
 				const double core = pi_4.vector3().mod() * pi_4.vector3().mod();
 				
@@ -69,7 +69,7 @@
                     _fwmoments[order] += 2.0 * core * gsl_sf_legendre_Pl( order, x_ij ) ;
                 }
             } // end loop over p_j
-            
+         
             // Now add autocorrelations
             // Obviously cos(theta_ij) = 1.0
             // Note that P_l(1) == 1 for each l
@@ -77,18 +77,18 @@
                     _fwmoments[order] += fabs( pi_4 * pi_4 );
             }
         } // end loop over p_i
-        
-        
+     
+     
         log << Log::DEBUG << "sumEnergy = " << sumEnergy << endl;
-        
+     
         for ( int order = 0; order < MAXMOMENT ; ++order ) {
             _fwmoments[order] /= (sumEnergy*sumEnergy);
         }
-        
+     
         // Normalize to H0
         for ( int order = 1; order < MAXMOMENT ; ++order ) {
             _fwmoments[order] /= _fwmoments[0];
         }
     }
-    
+ 
 }

Modified: trunk/src/Projections/HadronicFinalState.cc
==============================================================================
--- trunk/src/Projections/HadronicFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/HadronicFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,20 +12,20 @@
     return FinalState::compare(p);
   }
 
-  
+
   bool hadronFilter(const Particle& p) {
     return ! PID::isHadron(p.pdgId());
   }
 
-  
+
   void HadronicFinalState::project(const Event& e) {
     FinalState fsp = static_cast<FinalState>(*this);
     const FinalState& fs = applyProjection(e, fsp);
     _theParticles.clear();
-    std::remove_copy_if(fs.particles().begin(), fs.particles().end(), 
+    std::remove_copy_if(fs.particles().begin(), fs.particles().end(),
                         std::back_inserter(_theParticles), hadronFilter);
-    getLog() << Log::DEBUG << "Number of hadronic final-state particles = " 
+    getLog() << Log::DEBUG << "Number of hadronic final-state particles = "
              << _theParticles.size() << endl;
-  } 
-  
+  }
+
 }

Modified: trunk/src/Projections/IdentifiedFinalState.cc
==============================================================================
--- trunk/src/Projections/IdentifiedFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/IdentifiedFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -29,6 +29,6 @@
       }
     }
   }
-  
+
 
 }

Modified: trunk/src/Projections/InitialQuarks.cc
==============================================================================
--- trunk/src/Projections/InitialQuarks.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/InitialQuarks.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -40,8 +40,8 @@
         const int st = (*p)->status();
         const double pT = (*p)->momentum().perp();
         const double eta = (*p)->momentum().eta();
-        log << Log::TRACE << std::boolalpha 
-            << "ID = " << (*p)->pdg_id() << ", status = " << st << ", pT = " << pT 
+        log << Log::TRACE << std::boolalpha
+            << "ID = " << (*p)->pdg_id() << ", status = " << st << ", pT = " << pT
             << ", eta = " << eta << ": result = " << passed << endl;
         if (pv != NULL) {
           for (GenVertex::particles_in_const_iterator pp = pv->particles_in_const_begin() ;
@@ -60,12 +60,12 @@
       }
       if (passed) _theParticles.push_back(Particle(**p));
     }
-    log << Log::DEBUG << "Number of initial quarks = " 
+    log << Log::DEBUG << "Number of initial quarks = "
         << _theParticles.size() << endl;
     if (not _theParticles.empty())
       for (size_t i=0 ; i < _theParticles.size() ; i++)
-        log << Log::DEBUG << "Initial quark[" << i << "] = " 
+        log << Log::DEBUG << "Initial quark[" << i << "] = "
             << _theParticles[i].pdgId() << std::endl;
   }
-  
+
 }

Modified: trunk/src/Projections/InvMassFinalState.cc
==============================================================================
--- trunk/src/Projections/InvMassFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/InvMassFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -23,11 +23,11 @@
                                        double minmass, // min inv mass
                                        double maxmass) // max inv mass
     : _decayids(idpairs), _minmass(minmass), _maxmass(maxmass)
-  { 
+  {
     setName("InvMassFinalState");
     addProjection(fsp, "FS");
   }
-  
+
 
   int InvMassFinalState::compare(const Projection& p) const {
     // First compare the final states we are running on
@@ -49,10 +49,10 @@
     int decaycmp = cmp(_decayids, other._decayids);
     if (decaycmp != EQUIVALENT) return decaycmp;
 
-    // Finally compare them as final states 
+    // Finally compare them as final states
     return FinalState::compare(other);
-  } 
-  
+  }
+
 
 
   void InvMassFinalState::project(const Event& e) {
@@ -80,7 +80,7 @@
     }
 
     // Temporary container of selected particles iterators
-    // Useful to compare iterators and avoid double occurrences of the same 
+    // Useful to compare iterators and avoid double occurrences of the same
     // particle in case it matches with more than another particle
     vector<const Particle*> tmp;
 
@@ -98,13 +98,13 @@
             tmp.push_back(i2);
             _theParticles.push_back(*i2);
           }
-          getLog() << Log::DEBUG << "Selecting particles with IDs " 
+          getLog() << Log::DEBUG << "Selecting particles with IDs "
                    << i1->pdgId() << " & " << i2->pdgId()
                    << " and mass = " << v4.mass()/GeV << " GeV" << endl;
         }
       }
     }
-    
+ 
     getLog() << Log::DEBUG << "Selected " << _theParticles.size() << " particles." << endl;
     if (getLog().isActive(Log::TRACE)) {
       foreach (const Particle& p, _theParticles) {
@@ -113,6 +113,6 @@
       }
     }
   }
- 
- 
+
+
 }

Modified: trunk/src/Projections/JetAlg.cc
==============================================================================
--- trunk/src/Projections/JetAlg.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/JetAlg.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,6 +12,6 @@
     getLog() << Log::DEBUG << "Making visible final state from provided FS" << endl;
     addProjection(vfs, "FS");
   }
-  
-  
+
+
 }

Modified: trunk/src/Projections/JetShape.cc
==============================================================================
--- trunk/src/Projections/JetShape.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/JetShape.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -6,12 +6,12 @@
 
 
   /// Constructor.
-  JetShape::JetShape(const VetoedFinalState& vfsp, 
-                     const vector<FourMomentum>& jetaxes, 
-                     double rmin, double rmax, double interval, 
+  JetShape::JetShape(const VetoedFinalState& vfsp,
+                     const vector<FourMomentum>& jetaxes,
+                     double rmin, double rmax, double interval,
                      double r1minPsi, DeltaRScheme distscheme)
-    : _jetaxes(jetaxes), 
-      _rmin(rmin), _rmax(rmax), _interval(interval), 
+    : _jetaxes(jetaxes),
+      _rmin(rmin), _rmax(rmax), _interval(interval),
       _r1minPsi(r1minPsi), _distscheme(distscheme)
   {
     setName("JetShape");
@@ -51,7 +51,7 @@
       foreach (const Particle& p, vfs.particles()) {
         double drad_min = TWOPI;
         size_t i_drad_min = 0;
-        
+     
         // Identify "best match" jet axis for this particle
         for (size_t j = 0; j < _jetaxes.size(); ++j) {
           const double drad = deltaR(_jetaxes[j], p.momentum(), _distscheme);
@@ -80,8 +80,8 @@
         }
 
       }
-     
-      
+  
+   
       // Normalize to total pT
       for (size_t j = 0; j < _jetaxes.size(); j++) {
         const double psimax = _intjetshapes[j][_nbins-1];
@@ -94,9 +94,9 @@
         }
       }
 
-      
+   
     }
   }
-  
-  
+
+
 }

Modified: trunk/src/Projections/KtJets.cc
==============================================================================
--- trunk/src/Projections/KtJets.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/KtJets.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -10,9 +10,9 @@
   int KtJets::compare(const Projection& p) const {
     const KtJets& other = dynamic_cast<const KtJets&>(p);
     return \
-      mkNamedPCmp(other "FS") || 
+      mkNamedPCmp(other "FS") ||
       cmp(_type, other._type) ||
-      cmp(_angle, other._angle) || 
+      cmp(_angle, other._angle) ||
       cmp(_recom, other._recom) ||
       cmp(_rparameter, other._rparameter);
   }

Modified: trunk/src/Projections/LeadingParticlesFinalState.cc
==============================================================================
--- trunk/src/Projections/LeadingParticlesFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/LeadingParticlesFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -20,7 +20,7 @@
     return EQUIVALENT;
   }
 
-  
+
   void LeadingParticlesFinalState::project(const Event & e) {
     _theParticles.clear();
     const FinalState& fs = applyProjection<FinalState>(e, "FS");
@@ -35,7 +35,7 @@
       if (inList(*ifs) && FinalState::accept(ifs->genParticle())) {
         // Look for an existing particle in tmp container
         map < long, ParticleVector::const_iterator >::const_iterator itmp = tmp.find(ifs->pdgId());
-        if (itmp != tmp.end()) {  // if a particle with this type has been already selected 
+        if (itmp != tmp.end()) {  // if a particle with this type has been already selected
           // If the new pT is higher than the previous one, then substitute...
           if (ifs->momentum().pT() > itmp->second->momentum().pT()) {
             tmp[ifs->pdgId()] = ifs;
@@ -46,7 +46,7 @@
         }
       }
     }
-    
+ 
     // Loop on the tmp container and fill _theParticles
     map<long, ParticleVector::const_iterator>::const_iterator i;
     for (i = tmp.begin(); i != tmp.end(); ++i) {

Modified: trunk/src/Projections/LossyFinalState.cc
==============================================================================
--- trunk/src/Projections/LossyFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/LossyFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -13,17 +13,17 @@
     if (fscmp) return fscmp;
     return cmp(_lossFraction, other._lossFraction);
   }
-  
+
 
   void LossyFinalState::project(const Event& e) {
     const FinalState& fs = applyProjection<FinalState>(e, "FS");
     getLog() << Log::DEBUG << "Pre-loss number of FS particles = " << fs.particles().size() << endl;
     _theParticles.clear();
-    std::remove_copy_if(fs.particles().begin(), fs.particles().end(), 
+    std::remove_copy_if(fs.particles().begin(), fs.particles().end(),
                         std::back_inserter(_theParticles), RandomFilter(_lossFraction));
-    getLog() << Log::DEBUG << "Filtered number of FS particles = " << _theParticles.size() 
+    getLog() << Log::DEBUG << "Filtered number of FS particles = " << _theParticles.size()
              << " (should be ~" << (1-_lossFraction)*100 << "%)" << endl;
   }
 
-  
+
 }

Modified: trunk/src/Projections/PVertex.cc
==============================================================================
--- trunk/src/Projections/PVertex.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/PVertex.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -14,12 +14,12 @@
     if (!_thePVertex) {
       // Since no signal vertices are filled in existing Fortran & C++ MC's,
       // the decay vertex from first vertex in event with 2 incoming particles
-      
+   
       HepMC::GenEvent::vertex_const_iterator vIt = e.genEvent().vertices_begin();
       while((*vIt)->particles_in_size() != 2 && vIt != e.genEvent().vertices_end()){
         ++vIt;
       }
-      
+   
       if(vIt != e.genEvent().vertices_end()) _thePVertex = *vIt;
     }
     assert(_thePVertex);

Modified: trunk/src/Projections/SVertex.cc
==============================================================================
--- trunk/src/Projections/SVertex.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/SVertex.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -27,8 +27,8 @@
     const PVertex& pvtx = applyProjection<PVertex>(e, "PV");
     const Vector3 pvpos = pvtx.position();
     const ChargedFinalState& chfs = applyProjection<ChargedFinalState>(e, "FS");
-    
-    // Produce vector of vertices, each containing a vector of all charged 
+ 
+    // Produce vector of vertices, each containing a vector of all charged
     // final state particles belonging to this vertex
     typedef map<GenVertex*,ParticleVector> VtxPartsMap;
     VtxPartsMap vtxparts;
@@ -39,7 +39,7 @@
       HepMC::GenVertex* pvtx = p.genParticle().production_vertex();
       vtxparts[pvtx].push_back(p);
     }
-  
+
     // Check if jets are tagged, by means of selected vertices fulfilling track criteria
     _taggedjets.clear();
     for (VtxPartsMap::const_iterator vp = vtxparts.begin(); vp != vtxparts.end(); ++vp) {
@@ -53,31 +53,31 @@
         }
       }
     }
-    
+ 
   }
-  
-  
-  
-  /// Analysis dependent cuts on vertex tracks in SVertex projection 
-  /// Since the analysis specific cuts are very complex, they are not 
+
+
+
+  /// Analysis dependent cuts on vertex tracks in SVertex projection
+  /// Since the analysis specific cuts are very complex, they are not
   /// implemented in the projection and are instead passed via a function (object).
-  /// SVertex member function implementation below 
+  /// SVertex member function implementation below
   /// in: reference to instance of SVertex projection, ParticleVector of
   ///     vertex to be analyzed, primary (Gen)Vertex
-  /// out: FourMomentum = visible Momentum of vertex (selected tracks), 
-  /// return bool: cuts passed? 1 : 0 
+  /// out: FourMomentum = visible Momentum of vertex (selected tracks),
+  /// return bool: cuts passed? 1 : 0
   /// @todo Move this into the projection concrete class.
-  bool SVertex::_applyVtxTrackCuts(const ParticleVector& vtxparts, 
-                                   const Vector3& pvtxpos, 
-                                   FourMomentum vtxVisMom) 
+  bool SVertex::_applyVtxTrackCuts(const ParticleVector& vtxparts,
+                                   const Vector3& pvtxpos,
+                                   FourMomentum vtxVisMom)
   {
     // Check vertex final state charged particles, if fulfilling track criteria
-    size_t pass1trk1pTdcaSig25(0), pass1trk05pTdcaSig25(0), 
+    size_t pass1trk1pTdcaSig25(0), pass1trk05pTdcaSig25(0),
       pass2trk15pTdcaSig3(0), pass2trk1pTdcaSig3(0);
-    
+ 
     foreach (const Particle& vp, vtxparts) {
       const double IPsig = get2dClosestApproach(vp.genParticle(), pvtxpos) / _IPres;
-      
+   
       // Update "visible momentum" vector (returned by reference).
       if (vp.momentum().pT() > 0.5) {
         vtxVisMom += vp.momentum();
@@ -91,7 +91,7 @@
       if (vtxparts.size() >= 2 && IPsig > 3.) {
         if (vp.momentum().pT() > 1.5) pass2trk15pTdcaSig3++;
         else if (vp.momentum().pT() > 1.0) pass2trk1pTdcaSig3++;
-      } 
+      }
     }
 
     // Combine info from passes to make yes/no decision about whether this is significant:
@@ -100,6 +100,6 @@
     return false;
   }
 
-  
- 
+
+
 }

Modified: trunk/src/Projections/Sphericity.cc
==============================================================================
--- trunk/src/Projections/Sphericity.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/Sphericity.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,7 +9,7 @@
 
   Sphericity::Sphericity(const FinalState& fsp, double rparam)
     : _regparam(rparam)
-  { 
+  {
     setName("Sphericity");
     addProjection(fsp, "FS");
     clear();
@@ -71,7 +71,7 @@
 
     // Return (with "safe nonsense" sphericity params) if there are no final state particles.
     if (fsmomenta.empty()) {
-      getLog() << Log::DEBUG << "No particles in final state..." << endl; 
+      getLog() << Log::DEBUG << "No particles in final state..." << endl;
       clear();
       return;
     }
@@ -95,7 +95,7 @@
         for (size_t j = 0; j < 3; ++j) {
           mMomPart.set(i,j, p3[i]*p3[j]);
         }
-      } 
+      }
       mMom += regfactor * mMomPart;
     }
 
@@ -112,12 +112,12 @@
       getLog() << Log::ERROR << "[1,2] vs. [2,1]: " << mMom.get(1,2) << ", " << mMom.get(2,1) << endl;
     }
     // If not symmetric, something's wrong (we made sure the error msg appeared first).
-    assert(isSymm); 
+    assert(isSymm);
 
     // Diagonalize momentum matrix.
     const EigenSystem<3> eigen3 = diagonalize(mMom);
     getLog() << Log::DEBUG << "Diag momentum tensor = " << endl << eigen3.getDiagMatrix() << endl;
-    
+ 
     // Reset and set eigenvalue/vector parameters.
     _lambdas.clear();
     _sphAxes.clear();
@@ -129,12 +129,12 @@
     }
 
     // Debug output.
-    getLog() << Log::DEBUG << "Lambdas = (" 
+    getLog() << Log::DEBUG << "Lambdas = ("
              << lambda1() << ", " << lambda2() << ", " << lambda3() << ")" << endl;
     getLog() << Log::DEBUG << "Sum of lambdas = " << lambda1() + lambda2() + lambda3() << endl;
-    getLog() << Log::DEBUG << "Vectors = " 
+    getLog() << Log::DEBUG << "Vectors = "
              << sphericityAxis() << ", "
-             << sphericityMajorAxis() << ", " 
+             << sphericityMajorAxis() << ", "
              << sphericityMinorAxis() << ")" << endl;
   }
 

Modified: trunk/src/Projections/Thrust.cc
==============================================================================
--- trunk/src/Projections/Thrust.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/Thrust.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -111,7 +111,7 @@
 
     // Clear the caches
     _thrusts.clear();
-    _thrustAxes.clear(); 
+    _thrustAxes.clear();
 
 
     // If there are fewer than 2 visible particles, we can't do much
@@ -146,7 +146,7 @@
 
 
     // Temporary variables for calcs
-    Vector3 axis(0,0,0); 
+    Vector3 axis(0,0,0);
     double val = 0.;
 
     // Get thrust
@@ -159,7 +159,7 @@
     getLog() << Log::DEBUG << "Axis = " << axis << endl;
     _thrustAxes.push_back(axis);
 
-    // Get thrust major 
+    // Get thrust major
     vector<Vector3> threeMomenta;
     foreach (const Vector3& v, fsmomenta) {
       // Get the part of each 3-momentum which is perpendicular to the thrust axis
@@ -170,7 +170,7 @@
     _thrusts.push_back(val / momentumSum);
     if (axis.x() < 0) axis = -axis;
     axis = axis.unit();
-    _thrustAxes.push_back(axis); 
+    _thrustAxes.push_back(axis);
 
     // Get thrust minor
     if (_thrustAxes[0].dot(_thrustAxes[1]) < 1e-10) {

Modified: trunk/src/Projections/TriggerCDFRun0Run1.cc
==============================================================================
--- trunk/src/Projections/TriggerCDFRun0Run1.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/TriggerCDFRun0Run1.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -18,7 +18,7 @@
     // Event selection based on tracks in VTPC (time projection chambers)
     int n_backward = 0;
     int n_forward = 0;
-      
+   
     const ChargedFinalState& cfs = applyProjection<ChargedFinalState>(evt, "CFS");
     foreach (const Particle& p, cfs.particles()) {
       const double eta = p.momentum().pseudorapidity();
@@ -27,14 +27,14 @@
       if (inRange(eta, -3.0, 0.0)) n_backward++;
       else if (inRange(eta, 0.0, 3.0)) n_forward++;
     }
-    
+ 
     // Require at least 4 tracks with at least one in each of the forward
     // and backward hemispheres:
     if (n_trig_1 == 0 || n_trig_2 == 0) return;
     getLog() << Log::DEBUG << "Trigger 1: " << n_trig_1 << " Trigger 2: " << n_trig_2 << endl;
     if (n_backward + n_forward < 4 || n_backward == 0 || n_forward == 0) return;
     getLog() << Log::DEBUG << " Num. forward: " << n_forward  << ", Num. backward: " << n_backward << endl;
-    
+ 
     // Trigger success:
     _decision_mb = true;
   }

Modified: trunk/src/Projections/TriggerUA5.cc
==============================================================================
--- trunk/src/Projections/TriggerUA5.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/TriggerUA5.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -28,12 +28,12 @@
     getLog() << Log::DEBUG << "Trigger -: " << _n_minus << ", Trigger +: " << _n_plus << endl;
 
     // Common SD/NSD trigger requirement: must activate at least one hodoscope
-    if (_n_minus == 0 && _n_plus == 0) return; 
+    if (_n_minus == 0 && _n_plus == 0) return;
     _decision_sd = true;
 
     // Extra NSD trigger requirements
     if (_n_minus == 0 || _n_plus == 0) return;
-    _decision_nsd_1 = true;    
+    _decision_nsd_1 = true;
     if (_n_minus < 2 || _n_plus < 2) return;
     _decision_nsd_2 = true;
   }

Modified: trunk/src/Projections/UnstableFinalState.cc
==============================================================================
--- trunk/src/Projections/UnstableFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/UnstableFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,8 +11,8 @@
   int UnstableFinalState::compare(const Projection& p) const {
     const UnstableFinalState& other = dynamic_cast<const UnstableFinalState&>(p);
     return \
-      cmp(_etamin, other._etamin) || 
-      cmp(_etamax, other._etamax) || 
+      cmp(_etamin, other._etamin) ||
+      cmp(_etamax, other._etamax) ||
       cmp(_ptmin, other._ptmin);
   }
 
@@ -46,8 +46,8 @@
       }
 
       if (log.isActive(Log::TRACE)) {
-        log << Log::TRACE << std::boolalpha 
-            << "ID = " << (*p)->pdg_id() << ", status = " << st << ", pT = " << pT 
+        log << Log::TRACE << std::boolalpha
+            << "ID = " << (*p)->pdg_id() << ", status = " << st << ", pT = " << pT
             << ", eta = " << eta << ": result = " << passed << endl;
         if (pv!=NULL) {
           for (GenVertex::particles_in_const_iterator pp = pv->particles_in_const_begin() ;
@@ -66,7 +66,7 @@
       }
       if (passed) _theParticles.push_back(Particle(**p));
     }
-    log << Log::DEBUG << "Number of final-state particles = " 
+    log << Log::DEBUG << "Number of final-state particles = "
         << _theParticles.size() << endl;
   }
 

Modified: trunk/src/Projections/VetoedFinalState.cc
==============================================================================
--- trunk/src/Projections/VetoedFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/VetoedFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -37,12 +37,12 @@
     _theParticles.reserve(fs.particles().size());
     foreach (const Particle& p, fs.particles()) {
       if (getLog().isActive(Log::DEBUG)) {
-        vector<long> codes; 
+        vector<long> codes;
         for (VetoDetails::const_iterator code = _vetoCodes.begin(); code != _vetoCodes.end(); ++code) {
           codes.push_back(code->first);
         }
         const string codestr = "{ " + join(codes) + " }";
-        getLog() << Log::DEBUG << p.pdgId() << " vs. veto codes = " 
+        getLog() << Log::DEBUG << p.pdgId() << " vs. veto codes = "
                  << codestr << " (" << codes.size() << ")" << endl;
       }
       const long pdgid = p.pdgId();
@@ -63,7 +63,7 @@
         getLog() << Log::DEBUG << "ID = " << pdgid << ", pT range = " << rangess.str();
         stringstream debugline;
         debugline << "with PDG code = " << pdgid << " pT = " << p.momentum().pT();
-        if (pt < ptrange.first || pt > ptrange.second) { 
+        if (pt < ptrange.first || pt > ptrange.second) {
           getLog() << Log::DEBUG << "Storing " << debugline.str() << endl;
           _theParticles.push_back(p);
         } else {
@@ -71,7 +71,7 @@
         }
       }
     }
-  
+
     set<ParticleVector::iterator> toErase;
     for (set<int>::iterator nIt = _nCompositeDecays.begin();
          nIt != _nCompositeDecays.end() && !_theParticles.empty(); ++nIt) {
@@ -81,15 +81,15 @@
       start.insert(_theParticles.begin());
       oldMasses.insert(pair<set<ParticleVector::iterator>, FourMomentum>
                        (start, _theParticles.begin()->momentum()));
-      
+   
       for (int nParts = 1; nParts != *nIt; ++nParts) {
         for (map<set<ParticleVector::iterator>, FourMomentum>::iterator mIt = oldMasses.begin();
              mIt != oldMasses.end(); ++mIt) {
           ParticleVector::iterator pStart = *(mIt->first.rbegin());
-          for (ParticleVector::iterator pIt = pStart + 1; pIt != _theParticles.end(); ++pIt) { 
+          for (ParticleVector::iterator pIt = pStart + 1; pIt != _theParticles.end(); ++pIt) {
             FourMomentum cMom = mIt->second + pIt->momentum();
             set<ParticleVector::iterator> pList(mIt->first);
-            pList.insert(pIt);   
+            pList.insert(pIt);
             newMasses[pList] = cMom;
           }
         }
@@ -107,18 +107,18 @@
             if (mass < massRange.second && mass > massRange.first) {
               for (set<ParticleVector::iterator>::iterator lIt = mIt->first.begin();
                    lIt != mIt->first.end(); ++lIt) {
-                toErase.insert(*lIt);                
+                toErase.insert(*lIt);
               }
             }
           }
         }
       }
     }
-    
+ 
     for (set<ParticleVector::iterator>::reverse_iterator p = toErase.rbegin(); p != toErase.rend(); ++p) {
       _theParticles.erase(*p);
-    }    
-    
+    }
+ 
     for (ParentVetos::const_iterator vIt = _parentVetoes.begin(); vIt != _parentVetoes.end(); ++vIt) {
       for (ParticleVector::iterator p = _theParticles.begin(); p != _theParticles.end(); ++p) {
         GenVertex *startVtx=((*p).genParticle()).production_vertex();
@@ -127,7 +127,7 @@
         if (startVtx!=0) {
           for (GenVertex::particle_iterator pIt = startVtx->particles_begin(HepMC::ancestors);
                pIt != startVtx->particles_end(HepMC::ancestors) && !veto; ++pIt) {
-            
+         
             if (*vIt == (*pIt)->pdg_id()) {
               veto = true;
               p = _theParticles.erase(p);
@@ -147,7 +147,7 @@
         bool found = false;
         for (ParticleVector::const_iterator ipart = vfsp.begin(); ipart != vfsp.end(); ++ipart){
           if (!ipart->hasGenParticle()) continue;
-          getLog() << Log::DEBUG << "Comparing barcode " << icheck->genParticle().barcode() << " with veto particle " << ipart->genParticle().barcode() << endl; 
+          getLog() << Log::DEBUG << "Comparing barcode " << icheck->genParticle().barcode() << " with veto particle " << ipart->genParticle().barcode() << endl;
           if (ipart->genParticle().barcode() == icheck->genParticle().barcode()){
             found = true;
             break;
@@ -160,6 +160,6 @@
       }	
     }
   }
-  
+
 
 }

Modified: trunk/src/Projections/VisibleFinalState.cc
==============================================================================
--- trunk/src/Projections/VisibleFinalState.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/VisibleFinalState.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -17,6 +17,6 @@
     const FinalState& vfs = applyProjection<FinalState>(e, "VFS");
     _theParticles = vfs.particles();
   }
-  
+
 
 }

Modified: trunk/src/Projections/WFinder.cc
==============================================================================
--- trunk/src/Projections/WFinder.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/WFinder.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -64,7 +64,7 @@
     l_nu_ids += std::make_pair(-abs(pid), abs(nu_pid));
     InvMassFinalState imfs(fs, l_nu_ids, m2_min, m2_max);
     addProjection(imfs, "IMFS");
-    
+ 
     ClusteredPhotons cphotons(FinalState(), imfs, dRmax);
     addProjection(cphotons, "CPhotons");
 
@@ -98,8 +98,8 @@
     if (cmp != EQUIVALENT) return cmp;
 
     return EQUIVALENT;
-  } 
-  
+  }
+
 
   void WFinder::project(const Event& e) {
     _theParticles.clear();
@@ -110,7 +110,7 @@
     const int w3charge = PID::threeCharge(imfs.particles()[0].pdgId()) + PID::threeCharge(imfs.particles()[1].pdgId());
     assert(abs(w3charge) == 3);
     const int wcharge = sign(w3charge);
-    /// @todo Provide W charge method 
+    /// @todo Provide W charge method
 
     stringstream msg;
     string wsign = (wcharge == 1) ? "+" : "-";
@@ -135,6 +135,6 @@
     getLog() << Log::DEBUG << name() << " found " << _theParticles.size()
              << " W candidates." << endl;
   }
- 
- 
+
+
 }

Modified: trunk/src/Projections/ZFinder.cc
==============================================================================
--- trunk/src/Projections/ZFinder.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Projections/ZFinder.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -58,7 +58,7 @@
 
     InvMassFinalState imfs(fs, std::make_pair(pid, -pid), m2_min, m2_max);
     addProjection(imfs, "IMFS");
-    
+ 
     ClusteredPhotons cphotons(FinalState(), imfs, dRmax);
     addProjection(cphotons, "CPhotons");
 
@@ -91,8 +91,8 @@
     if (cmp != EQUIVALENT) return cmp;
 
     return EQUIVALENT;
-  } 
-  
+  }
+
 
   void ZFinder::project(const Event& e) {
     _theParticles.clear();
@@ -123,6 +123,6 @@
     getLog() << Log::DEBUG << name() << " found " << _theParticles.size()
              << " particles." << endl;
   }
- 
- 
+
+
 }

Modified: trunk/src/Run.cc
==============================================================================
--- trunk/src/Run.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Run.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -11,24 +11,24 @@
   Run::Run(AnalysisHandler& ah) : _ah(ah), _xs(-1.0),
     m_io(NULL), m_istr(NULL) {
   }
-  
-  
+
+
   Run::~Run() {
   }
-  
-  
+
+
   Run& Run::setCrossSection(const double xs) {
     _xs = xs;
     return *this;
   }
-  
-  
+
+
   Run& Run::setListAnalyses(const bool dolist) {
     _listAnalyses = dolist;
     return *this;
   }
-  
-  
+
+
   bool Run::prepareFile(const std::string& evtfile) {
     if (evtfile == "-") {
       m_io = new HepMC::IO_GenEvent(std::cin);
@@ -41,11 +41,11 @@
       Log::getLog("Rivet.Run") << Log::ERROR << "Read error on file " << evtfile << endl;
       return false;
     }
-  
+
     return true;
   }
 
-  
+
   bool Run::processEvent(bool firstEvent) {
     GenEvent* evt = new GenEvent();
     if (!m_io->fill_next_event(evt)) {
@@ -59,7 +59,7 @@
       delete evt;
       return false;
     }
-    
+ 
     // Get beam details from first event, and ensure they match for all following events
     if (evt->particles_size() != 0) {
       const BeamPair beams = beamIds(*evt);
@@ -100,7 +100,7 @@
         delete evt;
         return false;
       }
-      
+   
       if (_listAnalyses) {
         foreach (const std::string& ana, _ah.analysisNames()) {
           cout << ana << endl;
@@ -133,11 +133,11 @@
     }
 
     /// @todo If NOT first event, check that beams aren't changed
-    
-    // Analyze event and delete HepMC event object      
+ 
+    // Analyze event and delete HepMC event object
     _ah.analyze(*evt);
     delete evt;
-    
+ 
     return true;
   }
 
@@ -146,7 +146,7 @@
     // Final HepMC object clean-up
     delete m_io;
     if (m_istr) delete m_istr;
-    
+ 
     return true;
   }
 
@@ -160,5 +160,5 @@
     return _sqrts;
   }
 
-  
+
 }

Modified: trunk/src/Test/testBoost.cc
==============================================================================
--- trunk/src/Test/testBoost.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Test/testBoost.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -27,7 +27,7 @@
 //  and by ordering relationship (std::set).
 
 struct Foo
-{ 
+{
   Foo( int _x ) : x(_x) {}
   ~Foo() { std::cout << "Destructing a Foo with x=" << x << "\n"; }
   int x;
@@ -67,8 +67,8 @@
 
   std::cout << "foo_vector:\n";
   std::for_each( foo_vector.begin(), foo_vector.end(), FooPtrOps() );
-  
-  std::cout << "\nfoo_set:\n"; 
+
+  std::cout << "\nfoo_set:\n";
   std::for_each( foo_set.begin(), foo_set.end(), FooPtrOps() );
   std::cout << "\n";
 
@@ -79,7 +79,7 @@
 //   1
 //   3
 //   2
-//   
+//
 //   foo_set:
 //   3
 //   2
@@ -89,6 +89,6 @@
 //   Destructing a Foo with x=1
 //   Destructing a Foo with x=3
 //   Destructing a Foo with x=2
-   
+
   return 0;
 }

Modified: trunk/src/Test/testMatVec.cc
==============================================================================
--- trunk/src/Test/testMatVec.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Test/testMatVec.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -9,7 +9,7 @@
 
 int main() {
   using namespace Rivet;
-  
+
   FourVector a(1,0,0,0);
   cout << a << ": interval = " << a.invariant() << endl;
   assert(fuzzyEquals(a.invariant(), 1));
@@ -60,7 +60,7 @@
   cout << m1+m2 << " == " << m3 << ": " << (m1+m2 == m3 ? "true" : "false") << endl;
   cout << endl;
 
-  
+
   Vector3 v3(1,2,3);
   cout << "Vector: " << v3 << endl;
   cout << "Invert: " << v3 << " --> " << -v3 << endl;
@@ -120,7 +120,7 @@
   cout << "LTx: " << ltX << endl;
   cout << "I on LTx: " << ltX.rotate(Matrix3::mkIdentity()) << endl;
   cout << "Rot90 on LTx: " << ltX.rotate(rot90) << endl;
-  cout << endl;  
+  cout << endl;
 
   cout << "X-boosts:" << endl;
   const FourMomentum p1 = FourMomentum(10,0,0,1);
@@ -133,9 +133,9 @@
   cout << endl;
 
   LorentzTransform ltY(0,0.4,0);
-  cout << FourMomentum(1,0,0,1) << " -> " //<< "\n  " 
+  cout << FourMomentum(1,0,0,1) << " -> " //<< "\n  "
        << (ltX * ltY).transform(FourMomentum(1,0,0,1)) << endl;
-  cout << FourMomentum(1,0,0,1) << " -> " //<< "\n  " 
+  cout << FourMomentum(1,0,0,1) << " -> " //<< "\n  "
        << (ltY * ltX).transform(FourMomentum(1,0,0,1)) << endl;
   cout << (ltX * ltY).boost() << endl;
   cout << (ltY * ltX).boost() << endl;

Modified: trunk/src/Tools/BinnedHistogram.cc
==============================================================================
--- trunk/src/Tools/BinnedHistogram.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/BinnedHistogram.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -12,13 +12,13 @@
 
 namespace Rivet{
 
-  template<typename T> 
+  template<typename T>
   const BinnedHistogram<T> &BinnedHistogram<T>::addHistogram
   (const T &binMin, const T &binMax, AIDA::IHistogram1D *histo){
-    
+ 
     if(binMin > binMax) throw Error
       ("Cannot add a binned histogram where the lower bin edge is above the upper edge");
-    
+ 
     _histosByUpperBound[binMax] = histo;
     _histosByLowerBound[binMin] = histo;
     bool found = false;
@@ -26,11 +26,11 @@
         histIt != _histos.end() && !found; ++histIt){
       if((*histIt)==histo) found = true;
     }
-    
+ 
     if(!found){
       _histos.push_back(histo);
     }
-    
+ 
     return *this;
   }
 ////////////////////////////////////////////////////////////////////////////////
@@ -39,15 +39,15 @@
                                                      const T &val,
                                                      const double &weight){
 
-    typename map<T, AIDA::IHistogram1D*>::iterator histIt = 
+    typename map<T, AIDA::IHistogram1D*>::iterator histIt =
       _histosByUpperBound.upper_bound(bin);
     //check that the bin is not out of range
     if(histIt == _histosByUpperBound.end()){
       return 0;
     }
-    
+ 
     AIDA::IHistogram1D* histo = histIt->second;
-    
+ 
     histIt = _histosByLowerBound.lower_bound(bin);
     //no need to check going beyond the upper bound if we already passed above
     //(given that upper bound > lower bound is checked)
@@ -55,17 +55,17 @@
     if(histIt == _histosByLowerBound.begin()){
       return 0;
     }
-    //lowerbound actually gives us the iterator one above the nearest element, 
-    //so decrement it.  
+    //lowerbound actually gives us the iterator one above the nearest element,
+    //so decrement it.
     //This is safe because we already checked we're not at the start!
     --histIt;
-    
+ 
     if(histo!=histIt->second){
       return 0;
     }
-    
+ 
     histo->fill(val, weight);
-    
+ 
     return histo;
   }
 ////////////////////////////////////////////////////////////////////////////////
@@ -73,7 +73,7 @@
   template class BinnedHistogram<double>;
   template class BinnedHistogram<int>;
   template class BinnedHistogram<float>;
-  
+
 }
 ////////////////////////////////////////////////////////////////////////////////
 

Modified: trunk/src/Tools/Logging.cc
==============================================================================
--- trunk/src/Tools/Logging.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/Logging.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,7 +16,7 @@
   bool Log::useShellColors = true;
 
 
-  Log::Log(const string& name) 
+  Log::Log(const string& name)
     : _name(name), _level(INFO), _nostream(new ostream(0)) { }
 
 
@@ -37,10 +37,10 @@
   }
 
 
-  void Log::setLevel(const string& name, int level) { 
+  void Log::setLevel(const string& name, int level) {
     defaultLevels[name] = level;
     //cout << name << " -> " << level << endl;
-    _updateLevels(defaultLevels, existingLogs); 
+    _updateLevels(defaultLevels, existingLogs);
   }
 
 
@@ -99,7 +99,7 @@
     case ERROR:
       return "ERROR";
     default:
-      return "";     
+      return "";
     }
     //throw Error("Enum value was not a valid log level. How did that happen?");
   }
@@ -173,7 +173,7 @@
 
     out += " ";
     out += message;
-    
+ 
     return out;
   }
 

Modified: trunk/src/Tools/ParticleIdUtils.cc
==============================================================================
--- trunk/src/Tools/ParticleIdUtils.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/ParticleIdUtils.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -48,7 +48,7 @@
     }
 }
 
-// Ion numbers are +/- 10LZZZAAAI. 
+// Ion numbers are +/- 10LZZZAAAI.
 int Z( const int & pid )
 {
     // a proton can also be a Hydrogen nucleus
@@ -57,7 +57,7 @@
     return 0;
 }
 
-// Ion numbers are +/- 10LZZZAAAI. 
+// Ion numbers are +/- 10LZZZAAAI.
 int A( const int & pid )
 {
     // a proton can also be a Hydrogen nucleus
@@ -67,7 +67,7 @@
 }
 
 // if this is a nucleus (ion), get nLambda
-// Ion numbers are +/- 10LZZZAAAI. 
+// Ion numbers are +/- 10LZZZAAAI.
 int lambda( const int & pid )
 {
     // a proton can also be a Hydrogen nucleus
@@ -85,7 +85,7 @@
 {
     if( extraBits(pid) > 0 ) {
         if( isNucleus(pid) )   { return true; }
-        return false; 
+        return false;
     }
     if( isSUSY(pid) ) { return true; }
     if( isRhadron(pid) ) { return true; }
@@ -96,9 +96,9 @@
     // DiQuark signature
     if( isDiQuark(pid) ) { return true; }
     // fundamental particle
-    if( fundamentalID(pid) > 0 ) { 
-      if(pid > 0 ) { 
-        return true; 
+    if( fundamentalID(pid) > 0 ) {
+      if(pid > 0 ) {
+        return true;
       } else {
         // AB - disabled this to remove need for PID -> name lookup.
         //if( hasFundamentalAnti(pid) ) { return true; }
@@ -117,7 +117,7 @@
 //     // these are defined by the generator and therefore are always valid
 //     if( fundamentalID(pid) <= 100 && fundamentalID(pid) >= 80 ) { return true; }
 //     // check id's from 1 to 79
-//     if( fundamentalID(pid) > 0 && fundamentalID(pid) < 80 ) { 
+//     if( fundamentalID(pid) > 0 && fundamentalID(pid) < 80 ) {
 //        if( validParticleName(-pid) ) { return true; }
 //     }
 //     return false;
@@ -135,7 +135,7 @@
     if( aid == 150 || aid == 350 || aid == 510 || aid == 530 ) { return true; }
     // pomeron, etc.
     if( pid == 110 || pid == 990 || pid == 9990 ) { return true; }
-    if(    digit(nj,pid) > 0 && digit(nq3,pid) > 0 
+    if(    digit(nj,pid) > 0 && digit(nq3,pid) > 0
         && digit(nq2,pid) > 0 && digit(nq1,pid) == 0 ) {
         // check for illegal antiparticles
         if( digit(nq3,pid) == digit(nq2,pid) && pid < 0 ) {
@@ -154,7 +154,7 @@
     if( abspid(pid) <= 100 ) { return false; }
     if( fundamentalID(pid) <= 100 && fundamentalID(pid) > 0 ) { return false; }
     if( abspid(pid) == 2110 || abspid(pid) == 2210 ) { return true; }
-    if(    digit(nj,pid) > 0  && digit(nq3,pid) > 0 
+    if(    digit(nj,pid) > 0  && digit(nq3,pid) > 0
         && digit(nq2,pid) > 0 && digit(nq1,pid) > 0 ) { return true; }
     return false;
 }
@@ -165,12 +165,12 @@
     if( extraBits(pid) > 0 ) { return false; }
     if( abspid(pid) <= 100 ) { return false; }
     if( fundamentalID(pid) <= 100 && fundamentalID(pid) > 0 ) { return false; }
-    if(    digit(nj,pid) > 0  && digit(nq3,pid) == 0 
+    if(    digit(nj,pid) > 0  && digit(nq3,pid) == 0
         && digit(nq2,pid) > 0 && digit(nq1,pid) > 0 ) {  // diquark signature
-       // EvtGen uses the diquarks for quark pairs, so, for instance, 
+       // EvtGen uses the diquarks for quark pairs, so, for instance,
        //   5501 is a valid "diquark" for EvtGen
        //if( digit(nj) == 1 && digit(nq2) == digit(nq1) ) { 	// illegal
-       //   return false; 
+       //   return false;
        //} else {
           return true;
        //}
@@ -197,7 +197,7 @@
 
 //
 // This implements the 2006 Monte Carlo nuclear code scheme.
-// Ion numbers are +/- 10LZZZAAAI. 
+// Ion numbers are +/- 10LZZZAAAI.
 // AAA is A - total baryon number
 // ZZZ is Z - total charge
 // L is the total number of strange quarks.
@@ -318,16 +318,16 @@
 // jSpin returns 2J+1, where J is the total spin
 int  jSpin( const int & pid )
 {
-    if( fundamentalID(pid) > 0 ) { 
+    if( fundamentalID(pid) > 0 ) {
 	// some of these are known
 	int fund = fundamentalID(pid);
 	if( fund > 0 && fund < 7 ) return 2;
-	if( fund == 9 ) return 3; 
+	if( fund == 9 ) return 3;
 	if( fund > 10 && fund < 17 ) return 2;
 	if( fund > 20 && fund < 25 ) return 3;
-        return 0; 
-    } else if( extraBits(pid) > 0 ) { 
-        return 0; 
+        return 0;
+    } else if( extraBits(pid) > 0 ) {
+        return 0;
     }
     return abspid(pid)%10;
 }
@@ -340,7 +340,7 @@
     int js = digit(nj,pid);
     if( digit(n,pid) == 9 ) { return 0; }	// tentative ID
     //if( tent == 9 ) { return 0; }	// tentative assignment
-    if( inl == 0 && js >= 3 ) { 
+    if( inl == 0 && js >= 3 ) {
         return 1;
     } else if( inl == 0  && js == 1 ) {
         return 0;
@@ -364,7 +364,7 @@
     //int tent = digit(n,pid);
     int js = digit(nj,pid);
     if( digit(n,pid) == 9 ) { return 0; }	// tentative ID
-    if( inl == 0 && js == 3 ) { 
+    if( inl == 0 && js == 3 ) {
         return 0;
     } else if( inl == 0 && js == 5 ) {
         return 1;
@@ -452,7 +452,7 @@
     if( charge == 0 ) {
         return 0;
     } else if( pid < 0 ) {
-        charge = -charge; 
+        charge = -charge;
     }
     return charge;
 }

Modified: trunk/src/Tools/ParticleName.cc
==============================================================================
--- trunk/src/Tools/ParticleName.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/ParticleName.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -3,10 +3,10 @@
 // ParticleName.cc
 // Author: Lynn Garren and Walter Brown
 //
-//  Create a map that gives a standard name for each pre-defined 
-//  particle ID number.   Also create a map for the reverse lookup of 
-//  the ID number from a string.  These maps are initialized if and only if 
-//  the public functions are called. Because the maps are static, 
+//  Create a map that gives a standard name for each pre-defined
+//  particle ID number.   Also create a map for the reverse lookup of
+//  the ID number from a string.  These maps are initialized if and only if
+//  the public functions are called. Because the maps are static,
 //  the initialization happens only once.
 //
 //  The user NEVER calls ParticleNameInit()
@@ -46,11 +46,11 @@
 
    typedef ParticleIdMap::const_iterator      idIterator;
    typedef ParticleLookupMap::const_iterator nameIterator;
-   
+
    ParticleNameMap(ParticleIdMap m1,ParticleLookupMap m2)
    : itsNameMap(m1), itsLookupMap(m2) {}
    ~ParticleNameMap() {}
-   
+
    ParticleIdMap       nameMap()    const { return itsNameMap; }
    ParticleLookupMap lookupMap()  const { return itsLookupMap; }
    idIterator   begin()               const { return itsNameMap.begin(); }
@@ -61,14 +61,14 @@
    nameIterator findString( const std::string & s) const { return itsLookupMap.find(s); }
 
 private:
-   
+
    ParticleIdMap       itsNameMap;
    ParticleLookupMap itsLookupMap;
-   
+
    // copies are not allowed
    ParticleNameMap( const ParticleNameMap & );
    ParticleNameMap & operator = ( const ParticleNameMap & );
-   
+
 };
 
 namespace {	// ParticleNameInit and ParticleNameMap are private
@@ -305,12 +305,12 @@
       {    9920022, "remnant photon" },
       {    9922212, "remnant nucleon" },
       {   -9922212, "remnant nucleon~" },
-      {    9900441, "cc~[1S08]" },     
-      {    9910441, "cc~[3P08]" },     
-      {    9900443, "cc~[3S18]" },     
-      {    9900551, "bb~[1S08]" },     
-      {    9910551, "bb~[3P08]" },     
-      {    9900553, "bb~[3S18]" },    
+      {    9900441, "cc~[1S08]" },
+      {    9910441, "cc~[3P08]" },
+      {    9900443, "cc~[3S18]" },
+      {    9900551, "bb~[1S08]" },
+      {    9910551, "bb~[3P08]" },
+      {    9900553, "bb~[3S18]" },
       {       1103, "dd_1" },
       {      -1103, "dd_1~" },
       {       2101, "ud_0" },
@@ -1683,7 +1683,7 @@
     os << std::endl;
 
     // simple: static  PartcleIdMap const &  pmap = getPartcleIdMap();
-    // simple: for( PartcleIdMap::const_iterator cit = pmap.begin(), mend = pmap.end(); 
+    // simple: for( PartcleIdMap::const_iterator cit = pmap.begin(), mend = pmap.end();
     // simple:                                 cit != mend;
 	// simple: 			  ++cit ) {
 	// simple: os << "  PDT number: " ;
@@ -1822,5 +1822,5 @@
     }
     return;
 }  // listParticleNames()
-    
+ 
   }}

Modified: trunk/src/Tools/RivetAIDA.cc
==============================================================================
--- trunk/src/Tools/RivetAIDA.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/RivetAIDA.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -35,17 +35,17 @@
         return file;
       }
     }
-    throw Rivet::Error("Couldn't find ref data file '" + papername + ".aida" + 
+    throw Rivet::Error("Couldn't find ref data file '" + papername + ".aida" +
                        " in $RIVET_REF_PATH, " + getRivetDataPath() + ", or .");
     return "";
   }
-  
-  
+
+
 
   const map<string, vector<DPSXPoint> > getDPSXValsErrs(string papername) {
     // Get filename
     const string xmlfile = getDatafilePath(papername);
-    
+ 
     // Open AIDA XML file
     TiXmlDocument doc(xmlfile);
     doc.LoadFile();
@@ -55,11 +55,11 @@
       cerr << err << endl;
       throw Error(err);
     }
-    
+ 
     // Return value, to be populated
     map<string, vector<DPSXPoint> > rtn;
-    
-    try {      
+ 
+    try {
       // Walk down tree to get to the <paper> element
       const TiXmlNode* aidaN = doc.FirstChild("aida");
       if (!aidaN) throw Error("Couldn't get <aida> root element");
@@ -72,7 +72,7 @@
           cerr << "Skipping non-reference histogram " << plotname << endl;
           continue;
         }
-        
+     
         /// @todo Check that "path" matches filename
         vector<DPSXPoint> points;
         for (const TiXmlNode* dpN = dpsN->FirstChild("dataPoint"); dpN; dpN = dpN->NextSibling()) {
@@ -80,8 +80,8 @@
           if (xMeasN) {
             const TiXmlElement* xMeasE = xMeasN->ToElement();
             const string centreStr = xMeasE->Attribute("value");
-            const string errplusStr = xMeasE->Attribute("errorPlus"); 
-            const string errminusStr = xMeasE->Attribute("errorMinus"); 
+            const string errplusStr = xMeasE->Attribute("errorPlus");
+            const string errminusStr = xMeasE->Attribute("errorMinus");
             //if (!centreStr) throw Error("Couldn't get a valid bin centre");
             //if (!errplusStr) throw Error("Couldn't get a valid bin err+");
             //if (!errminusStr) throw Error("Couldn't get a valid bin err-");
@@ -98,26 +98,26 @@
             /// @todo Throw an exception here?
           }
         }
-        
+     
         // Add to the map
         rtn[plotname] = points;
       }
-      
+   
     }
     // Write out the error
-    /// @todo Rethrow as a general XML failure. 
+    /// @todo Rethrow as a general XML failure.
     catch (std::exception& e) {
       cerr << e.what() << endl;
       throw;
     }
-    
+ 
     // Return
     return rtn;
   }
-    
+ 
 
 
-  const map<string, BinEdges> 
+  const map<string, BinEdges>
   getBinEdges(string papername) {
     const map<string, vector<DPSXPoint> > xpoints = getDPSXValsErrs(papername);
     return getBinEdges(xpoints);
@@ -125,7 +125,7 @@
 
 
 
-  const map<string, BinEdges> 
+  const map<string, BinEdges>
   getBinEdges(const map<string, vector<DPSXPoint> >& xpoints) {
 
     map<string, BinEdges> rtn;
@@ -158,7 +158,7 @@
       //cout << "@@@ " << edges << endl;
 
       // Add to the map
-      rtn[plotname] = BinEdges(edges.begin(), edges.end()); 
+      rtn[plotname] = BinEdges(edges.begin(), edges.end());
     }
 
     // Return

Modified: trunk/src/Tools/TinyXML/tinyxml.cpp
==============================================================================
--- trunk/src/Tools/TinyXML/tinyxml.cpp	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/TinyXML/tinyxml.cpp	Thu Nov 19 15:02:51 2009	(r2080)
@@ -38,7 +38,7 @@
 	{
 		unsigned char c = (unsigned char) str[i];
 
-		if (    c == '&' 
+		if (    c == '&'
 		     && i < ( (int)str.length() - 2 )
 			 && str[i+1] == '#'
 			 && str[i+2] == 'x' )
@@ -142,7 +142,7 @@
 void TiXmlNode::CopyTo( TiXmlNode* target ) const
 {
 	target->SetValue (value.c_str() );
-	target->userData = userData; 
+	target->userData = userData;
 }
 
 
@@ -371,7 +371,7 @@
 }
 
 
-const TiXmlNode* TiXmlNode::NextSibling( const char * _value ) const 
+const TiXmlNode* TiXmlNode::NextSibling( const char * _value ) const
 {
 	const TiXmlNode* node;
 	for ( node = next; node; node = node->next )
@@ -487,7 +487,7 @@
 }
 
 
-TiXmlElement::TiXmlElement( const std::string& _value ) 
+TiXmlElement::TiXmlElement( const std::string& _value )
 	: TiXmlNode( TiXmlNode::ELEMENT )
 {
 	firstChild = lastChild = 0;
@@ -784,7 +784,7 @@
 	// superclass:
 	TiXmlNode::CopyTo( target );
 
-	// Element class: 
+	// Element class:
 	// Clone the attributes, then clone the children.
 	const TiXmlAttribute* attribute = 0;
 	for(	attribute = attributeSet.First();
@@ -803,7 +803,7 @@
 
 bool TiXmlElement::Accept( TiXmlVisitor* visitor ) const
 {
-	if ( visitor->VisitEnter( *this, attributeSet.First() ) ) 
+	if ( visitor->VisitEnter( *this, attributeSet.First() ) )
 	{
 		for ( const TiXmlNode* node=FirstChild(); node; node=node->NextSibling() )
 		{
@@ -928,7 +928,7 @@
 
 bool TiXmlDocument::LoadFile( FILE* file, TiXmlEncoding encoding )
 {
-	if ( !file ) 
+	if ( !file )
 	{
 		SetError( TIXML_ERROR_OPENING_FILE, 0, 0, TIXML_ENCODING_UNKNOWN );
 		return false;
@@ -960,13 +960,13 @@
 	// 2.11 End-of-Line Handling
 	// <snip>
 	// <quote>
-	// ...the XML processor MUST behave as if it normalized all line breaks in external 
-	// parsed entities (including the document entity) on input, before parsing, by translating 
-	// both the two-character sequence #xD #xA and any #xD that is not followed by #xA to 
+	// ...the XML processor MUST behave as if it normalized all line breaks in external
+	// parsed entities (including the document entity) on input, before parsing, by translating
+	// both the two-character sequence #xD #xA and any #xD that is not followed by #xA to
 	// a single #xA character.
 	// </quote>
 	//
-	// It is not clear fgets does that, and certainly isn't clear it works cross platform. 
+	// It is not clear fgets does that, and certainly isn't clear it works cross platform.
 	// Generally, you expect fgets to translate from the convention of the OS to the c/unix
 	// convention, and not work generally.
 
@@ -1057,7 +1057,7 @@
 
 bool TiXmlDocument::SaveFile( FILE* fp ) const
 {
-	if ( useMicrosoftBOM ) 
+	if ( useMicrosoftBOM )
 	{
 		const unsigned char TIXML_UTF_LEAD_0 = 0xefU;
 		const unsigned char TIXML_UTF_LEAD_1 = 0xbbU;
@@ -1717,12 +1717,12 @@
 		attrib->Print( 0, 0, &buffer );
 	}
 
-	if ( !element.FirstChild() ) 
+	if ( !element.FirstChild() )
 	{
 		buffer += " />";
 		DoLineBreak();
 	}
-	else 
+	else
 	{
 		buffer += ">";
 		if (    element.FirstChild()->ToText()
@@ -1745,11 +1745,11 @@
 bool TiXmlPrinter::VisitExit( const TiXmlElement& element )
 {
 	--depth;
-	if ( !element.FirstChild() ) 
+	if ( !element.FirstChild() )
 	{
 		// nothing.
 	}
-	else 
+	else
 	{
 		if ( simpleTextPrint )
 		{

Modified: trunk/src/Tools/TinyXML/tinyxmlerror.cpp
==============================================================================
--- trunk/src/Tools/TinyXML/tinyxmlerror.cpp	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/TinyXML/tinyxmlerror.cpp	Thu Nov 19 15:02:51 2009	(r2080)
@@ -2,12 +2,12 @@
 www.sourceforge.net/projects/tinyxml
 Original code (2.0 and earlier )copyright (c) 2000-2006 Lee Thomason (www.grinninglizard.com)
 
-This software is provided 'as-is', without any express or implied 
-warranty. In no event will the authors be held liable for any 
+This software is provided 'as-is', without any express or implied
+warranty. In no event will the authors be held liable for any
 damages arising from the use of this software.
 
-Permission is granted to anyone to use this software for any 
-purpose, including commercial applications, and to alter it and 
+Permission is granted to anyone to use this software for any
+purpose, including commercial applications, and to alter it and
 redistribute it freely, subject to the following restrictions:
 
 1. The origin of this software must not be misrepresented; you must

Modified: trunk/src/Tools/TinyXML/tinyxmlparser.cpp
==============================================================================
--- trunk/src/Tools/TinyXML/tinyxmlparser.cpp	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/TinyXML/tinyxmlparser.cpp	Thu Nov 19 15:02:51 2009	(r2080)
@@ -2,23 +2,23 @@
 www.sourceforge.net/projects/tinyxml
 Original code (2.0 and earlier )copyright (c) 2000-2002 Lee Thomason (www.grinninglizard.com)
 
-This software is provided 'as-is', without any express or implied 
-warranty. In no event will the authors be held liable for any 
+This software is provided 'as-is', without any express or implied
+warranty. In no event will the authors be held liable for any
 damages arising from the use of this software.
 
-Permission is granted to anyone to use this software for any 
-purpose, including commercial applications, and to alter it and 
+Permission is granted to anyone to use this software for any
+purpose, including commercial applications, and to alter it and
 redistribute it freely, subject to the following restrictions:
 
-1. The origin of this software must not be misrepresented; you must 
+1. The origin of this software must not be misrepresented; you must
 not claim that you wrote the original software. If you use this
 software in a product, an acknowledgment in the product documentation
 would be appreciated but is not required.
 
-2. Altered source versions must be plainly marked as such, and 
+2. Altered source versions must be plainly marked as such, and
 must not be misrepresented as being the original software.
 
-3. This notice may not be removed or altered from any source 
+3. This notice may not be removed or altered from any source
 distribution.
 */
 
@@ -40,7 +40,7 @@
 // Note tha "PutString" hardcodes the same list. This
 // is less flexible than it appears. Changing the entries
 // or order will break putstring.	
-TiXmlBase::Entity TiXmlBase::entity[ NUM_ENTITY ] = 
+TiXmlBase::Entity TiXmlBase::entity[ NUM_ENTITY ] =
 {
 	{ "&amp;",  5, '&' },
 	{ "&lt;",   4, '<' },
@@ -57,13 +57,13 @@
 // Beware of the non-characters in UTF-8:	
 //				ef bb bf (Microsoft "lead bytes")
 //				ef bf be
-//				ef bf bf 
+//				ef bf bf
 
 const unsigned char TIXML_UTF_LEAD_0 = 0xefU;
 const unsigned char TIXML_UTF_LEAD_1 = 0xbbU;
 const unsigned char TIXML_UTF_LEAD_2 = 0xbfU;
 
-const int TiXmlBase::utf8ByteTable[256] = 
+const int TiXmlBase::utf8ByteTable[256] =
 {
 	//	0	1	2	3	4	5	6	7	8	9	a	b	c	d	e	f
 		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0x00
@@ -75,9 +75,9 @@
 		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0x60
 		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0x70	End of ASCII range
 		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0x80 0x80 to 0xc1 invalid
-		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0x90 
-		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0xa0 
-		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0xb0 
+		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0x90
+		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0xa0
+		1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	1,	// 0xb0
 		1,	1,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	// 0xc0 0xc2 to 0xdf 2 byte
 		2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	2,	// 0xd0
 		3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	3,	// 0xe0 0xe0 to 0xef 3 byte
@@ -91,7 +91,7 @@
 	const unsigned long BYTE_MARK = 0x80;
 	const unsigned long FIRST_BYTE_MARK[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC };
 
-	if (input < 0x80) 
+	if (input < 0x80)
 		*length = 1;
 	else if ( input < 0x800 )
 		*length = 2;
@@ -105,22 +105,22 @@
 	output += *length;
 
 	// Scary scary fall throughs.
-	switch (*length) 
+	switch (*length)
 	{
 		case 4:
-			--output; 
-			*output = (char)((input | BYTE_MARK) & BYTE_MASK); 
+			--output;
+			*output = (char)((input | BYTE_MARK) & BYTE_MASK);
 			input >>= 6;
 		case 3:
-			--output; 
-			*output = (char)((input | BYTE_MARK) & BYTE_MASK); 
+			--output;
+			*output = (char)((input | BYTE_MARK) & BYTE_MASK);
 			input >>= 6;
 		case 2:
-			--output; 
-			*output = (char)((input | BYTE_MARK) & BYTE_MASK); 
+			--output;
+			*output = (char)((input | BYTE_MARK) & BYTE_MASK);
 			input >>= 6;
 		case 1:
-			--output; 
+			--output;
 			*output = (char)(input | FIRST_BYTE_MARK[*length]);
 	}
 }
@@ -130,7 +130,7 @@
 {
 	// This will only work for low-ascii, everything else is assumed to be a valid
 	// letter. I'm not sure this is the best approach, but it is quite tricky trying
-	// to figure out alhabetical vs. not across encoding. So take a very 
+	// to figure out alhabetical vs. not across encoding. So take a very
 	// conservative approach.
 
 //	if ( encoding == TIXML_ENCODING_UTF8 )
@@ -151,7 +151,7 @@
 {
 	// This will only work for low-ascii, everything else is assumed to be a valid
 	// letter. I'm not sure this is the best approach, but it is quite tricky trying
-	// to figure out alhabetical vs. not across encoding. So take a very 
+	// to figure out alhabetical vs. not across encoding. So take a very
 	// conservative approach.
 
 //	if ( encoding == TIXML_ENCODING_UTF8 )
@@ -325,7 +325,7 @@
 			
 			// Skip the stupid Microsoft UTF-8 Byte order marks
 			if (	*(pU+0)==TIXML_UTF_LEAD_0
-				 && *(pU+1)==TIXML_UTF_LEAD_1 
+				 && *(pU+1)==TIXML_UTF_LEAD_1
 				 && *(pU+2)==TIXML_UTF_LEAD_2 )
 			{
 				p += 3;
@@ -411,12 +411,12 @@
 	// After that, they can be letters, underscores, numbers,
 	// hyphens, or colons. (Colons are valid ony for namespaces,
 	// but tinyxml can't tell namespaces from names.)
-	if (    p && *p 
+	if (    p && *p
 		 && ( IsAlpha( (unsigned char) *p, encoding ) || *p == '_' ) )
 	{
 		const char* start = p;
 		while(		p && *p
-				&&	(		IsAlphaNum( (unsigned char ) *p, encoding ) 
+				&&	(		IsAlphaNum( (unsigned char ) *p, encoding )
 						 || *p == '_'
 						 || *p == '-'
 						 || *p == '.'
@@ -467,7 +467,7 @@
 					ucs += mult * (*q - 'a' + 10);
 				else if ( *q >= 'A' && *q <= 'F' )
 					ucs += mult * (*q - 'A' + 10 );
-				else 
+				else
 					return 0;
 				mult *= 16;
 				--q;
@@ -490,7 +490,7 @@
 			{
 				if ( *q >= '0' && *q <= '9' )
 					ucs += mult * (*q - '0');
-				else 
+				else
 					return 0;
 				mult *= 10;
 				--q;
@@ -569,10 +569,10 @@
 	return false;
 }
 
-const char* TiXmlBase::ReadText(	const char* p, 
-									TIXML_STRING * text, 
-									bool trimWhiteSpace, 
-									const char* endTag, 
+const char* TiXmlBase::ReadText(	const char* p,
+									TIXML_STRING * text,
+									bool trimWhiteSpace,
+									const char* endTag,
 									bool caseInsensitive,
 									TiXmlEncoding encoding )
 {
@@ -629,7 +629,7 @@
 			}
 		}
 	}
-	if ( p ) 
+	if ( p )
 		p += strlen( endTag );
 	return p;
 }
@@ -644,7 +644,7 @@
 	// This "pre-streaming" will never read the closing ">" so the
 	// sub-tag can orient itself.
 
-	if ( !StreamTo( in, '<', tag ) ) 
+	if ( !StreamTo( in, '<', tag ) )
 	{
 		SetError( TIXML_ERROR_PARSING_EMPTY, 0, 0, TIXML_ENCODING_UNKNOWN );
 		return;
@@ -666,7 +666,7 @@
 
 		if ( in->good() )
 		{
-			// We now have something we presume to be a node of 
+			// We now have something we presume to be a node of
 			// some sort. Identify it, and call the node to
 			// continue streaming.
 			TiXmlNode* node = Identify( tag->c_str() + tagIndex, TIXML_DEFAULT_ENCODING );
@@ -774,7 +774,7 @@
 				encoding = TIXML_ENCODING_UTF8;
 			else if ( StringEqual( enc, "UTF8", true, TIXML_ENCODING_UNKNOWN ) )
 				encoding = TIXML_ENCODING_UTF8;	// incorrect, but be nice
-			else 
+			else
 				encoding = TIXML_ENCODING_LEGACY;
 		}
 
@@ -829,7 +829,7 @@
 		return 0;
 	}
 
-	// What is this thing? 
+	// What is this thing?
 	// - Elements start with a letter or underscore, but xml is reserved.
 	// - Comments: <!--
 	// - Decleration: <?xml
@@ -926,7 +926,7 @@
 	// Okay...if we are a "/>" tag, then we're done. We've read a complete tag.
 	// If not, identify and stream.
 
-	if (    tag->at( tag->length() - 1 ) == '>' 
+	if (    tag->at( tag->length() - 1 ) == '>'
 		 && tag->at( tag->length() - 2 ) == '/' )
 	{
 		// All good!
@@ -944,7 +944,7 @@
 			StreamWhiteSpace( in, tag );
 
 			// Do we have text?
-			if ( in->good() && in->peek() != '<' ) 
+			if ( in->good() && in->peek() != '<' )
 			{
 				// Yep, text.
 				TiXmlText text( "" );
@@ -1197,8 +1197,8 @@
 				LinkEndChild( textNode );
 			else
 				delete textNode;
-		} 
-		else 
+		}
+		else
 		{
 			// We hit a '<'
 			// Have we hit a new element or an end tag? This could also be
@@ -1305,7 +1305,7 @@
 
 		(*tag) += (char) c;
 
-		if ( c == '>' 
+		if ( c == '>'
 			 && tag->at( tag->length() - 2 ) == '-'
 			 && tag->at( tag->length() - 3 ) == '-' )
 		{
@@ -1407,7 +1407,7 @@
 		{
 			if ( *p == SINGLE_QUOTE || *p == DOUBLE_QUOTE ) {
 				// [ 1451649 ] Attribute values with trailing quotes not handled correctly
-				// We did not have an opening quote but seem to have a 
+				// We did not have an opening quote but seem to have a
 				// closing one. Give up and throw an error.
 				if ( document ) document->SetError( TIXML_ERROR_READING_ATTRIBUTES, p, data, encoding );
 				return 0;
@@ -1425,7 +1425,7 @@
 	while ( in->good() )
 	{
 		int c = in->peek();	
-		if ( !cdata && (c == '<' ) ) 
+		if ( !cdata && (c == '<' ) )
 		{
 			return;
 		}
@@ -1446,7 +1446,7 @@
 				// terminator of cdata.
 				return;
 			}
-		}    
+		}
 	}
 }
 
@@ -1485,7 +1485,7 @@
 			++p;
 		}
 
-		TIXML_STRING dummy; 
+		TIXML_STRING dummy;
 		p = ReadText( p, &dummy, false, endTag, false, encoding );
 		return p;
 	}

Modified: trunk/src/Tools/Utils.cc
==============================================================================
--- trunk/src/Tools/Utils.cc	Thu Nov 19 14:02:18 2009	(r2079)
+++ trunk/src/Tools/Utils.cc	Thu Nov 19 15:02:51 2009	(r2080)
@@ -16,13 +16,13 @@
 
   // Return distance of closest approach from track to given (primary) vertex position.
   double get2dClosestApproach(const HepMC::GenParticle& track, const Vector3& vtx3pos) {
-    /// @todo Whoa! - implicit constructors from hell! 
+    /// @todo Whoa! - implicit constructors from hell!
     HepMC::FourVector trkvec = track;
-    HepMC::ThreeVector trk3vec = trkvec;    
+    HepMC::ThreeVector trk3vec = trkvec;
     HepMC::ThreeVector trk3pos = track.production_vertex()->position();
-    
+ 
     Vector3 diff(vtx3pos.x()-trk3pos.x(), vtx3pos.y()-trk3pos.y(), vtx3pos.z()-trk3pos.z());
-    
+ 
     // Impact parameter in the transverse plane
     const double d = fabs( trk3vec.x()*diff.y() - trk3vec.y()*diff.x() )
       / sqrt( sqr(trk3vec.x()) + sqr(trk3vec.y()) );
@@ -31,16 +31,16 @@
 
 
   // Return distance of closest approach from track to given (primary) vertex position.
-  double get3dClosestApproach(const HepMC::GenParticle& track, const Vector3& vtx3pos) {    
+  double get3dClosestApproach(const HepMC::GenParticle& track, const Vector3& vtx3pos) {
     HepMC::FourVector trkvec = track;
     HepMC::ThreeVector trk3vec = trkvec;
     HepMC::FourVector trkpos = track.production_vertex()->position();
     HepMC::ThreeVector trk3pos = trkpos;
     Vector3 diff(vtx3pos.x()-trk3pos.x(), vtx3pos.y()-trk3pos.y(), vtx3pos.z()-trk3pos.z());
-    
+ 
     // Impact parameter in 3 dimensions
     const double mag = sqrt( sqr(trk3vec.x()) + sqr(trk3vec.y()) + sqr(trk3vec.z()) );
-    const double d = sqrt( sqr(trk3vec.y()*diff.z()-trk3vec.z()*diff.y()) - 
+    const double d = sqrt( sqr(trk3vec.y()*diff.z()-trk3vec.z()*diff.y()) -
                            sqr(trk3vec.x()*diff.z()-trk3vec.z()*diff.x()) +
                            sqr(trk3vec.x()*diff.y()-trk3vec.y()*diff.x()) ) / mag;
     return d;
@@ -49,18 +49,18 @@
 
   /// Return Decay Length Significance between two vertices in transverse plane
   double get2dDecayLength(const Vector3& vtx1, const Vector3& vtx2, const FourMomentum& jetaxis) {
-    Vector3 diff = vtx1 - vtx2; 
-    const double l = (jetaxis.px()*diff.x() + jetaxis.py()*diff.y() ) 
+    Vector3 diff = vtx1 - vtx2;
+    const double l = (jetaxis.px()*diff.x() + jetaxis.py()*diff.y() )
       / sqrt(sqr(jetaxis.px())+sqr(jetaxis.py()));
     return l;
   }
 
 
 
-  /// Return 3 dimensional Decay Length Significance between vertices 
+  /// Return 3 dimensional Decay Length Significance between vertices
   double get3dDecayLength(const Vector3& vtx1, const Vector3& vtx2, const FourMomentum& jetaxis) {
     Vector3 diff = vtx1 - vtx2;
-    const double l = (jetaxis.px()*diff.x() +jetaxis.py()*diff.y() +jetaxis.pz()*diff.z()) 
+    const double l = (jetaxis.px()*diff.x() +jetaxis.py()*diff.y() +jetaxis.pz()*diff.z())
       / sqrt(sqr(jetaxis.px())+sqr(jetaxis.py())+sqr(jetaxis.pz()));
     return l;
   }


More information about the Rivet-svn mailing list