[Rivet-svn] r2132 - in trunk: . bin include/Rivet src/Analyses src/Core

blackhole at projects.hepforge.org
Fri Dec 4 01:26:27 GMT 2009


Author: buckley
Date: Fri Dec  4 01:26:26 2009
New Revision: 2132

Log:
Providing Analysis::sqrtS() and Analysis::beams(), and making sure they're available by the time the init methods are called
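
For illustration, here is a minimal sketch of how an analysis can now use
these accessors from its init() method. The class name and the energy
guard below are hypothetical; the pattern mirrors the OPAL_2004_S6132243
change in this commit:

    class MY_TEST_ANALYSIS : public Analysis {
    public:
      MY_TEST_ANALYSIS() : Analysis("MY_TEST_ANALYSIS") { }

      void init() {
        // sqrtS() and beams() are already valid here, since the handler
        // reads them from the first event before calling init().
        getLog() << Log::INFO << "Run sqrt(s) = " << sqrtS()/GeV
                 << " GeV, beams = " << beams() << endl;
        // Hypothetical guard: refuse to run at an unsupported energy.
        if (!fuzzyEquals(sqrtS()/GeV, 91.2, 0.01)) {
          throw Error("MY_TEST_ANALYSIS requires sqrt(s) ~ 91.2 GeV");
        }
      }

      // ... analyze() and finalize() as usual ...
    };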

Modified:
   trunk/ChangeLog
   trunk/bin/rivet
   trunk/include/Rivet/Analysis.hh
   trunk/include/Rivet/AnalysisHandler.hh
   trunk/src/Analyses/MC_LHC_WANALYSIS.cc
   trunk/src/Analyses/OPAL_2004_S6132243.cc
   trunk/src/Core/Analysis.cc
   trunk/src/Core/AnalysisHandler.cc

Modified: trunk/ChangeLog
==============================================================================
--- trunk/ChangeLog	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/ChangeLog	Fri Dec  4 01:26:26 2009	(r2132)
@@ -1,3 +1,8 @@
+2009-12-04  Andy Buckley  <andy at insectnation.org>
+
+	* Providing Analysis::sqrtS() and Analysis::beams(), and making
+	sure they're available by the time the init methods are called.
+
 2009-12-02  Andy Buckley  <andy at insectnation.org>
 
 	* Adding passing of first event sqrt(s) and beams to analysis handler.

Modified: trunk/bin/rivet
==============================================================================
--- trunk/bin/rivet	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/bin/rivet	Fri Dec  4 01:26:26 2009	(r2132)
@@ -330,24 +330,6 @@
     HEPMCFILES = ["-"]
 
 
-## Set up analysis handler
-RUNNAME = opts.RUN_NAME or ""
-ah = rivet.AnalysisHandler(opts.HISTOFILE, RUNNAME)
-if opts.ALL_ANALYSES:
-    opts.ANALYSES = all_analyses
-for a in opts.ANALYSES:
-    a_up = a.upper()
-    ## Print warning message and exit if not a valid analysis name
-    if not a_up in all_analyses:
-        print "'%s' is not a valid analysis. Available analyses are:" % a_up
-        for aa in all_analyses:
-            print "    %s" % aa
-        sys.exit(1)
-    logging.debug("Adding analysis '%s'" % a_up)
-    ah.addAnalysis(a_up)
-ah.init()
-        
-
 ## Event number logging
 def logNEvt(n, starttime, maxevtnum):
     nevtloglevel = logging.DEBUG
@@ -374,6 +356,22 @@
                     % (n, timeelapsed, timeleft, eta))
 
 
+## Set up analysis handler
+RUNNAME = opts.RUN_NAME or ""
+ah = rivet.AnalysisHandler(opts.HISTOFILE, RUNNAME)
+if opts.ALL_ANALYSES:
+    opts.ANALYSES = all_analyses
+for a in opts.ANALYSES:
+    a_up = a.upper()
+    ## Print warning message and exit if not a valid analysis name
+    if a_up not in all_analyses:
+        print "'%s' is not a valid analysis. Available analyses are:" % a_up
+        for aa in all_analyses:
+            print "    %s" % aa
+        sys.exit(1)
+    logging.debug("Adding analysis '%s'" % a_up)
+    ah.addAnalysis(a_up)
+
 ## Read and process events
 run = rivet.Run(ah)
 if opts.CROSS_SECTION is not None:
@@ -393,6 +391,9 @@
     log.error("Failed to initialise on event file %s" % evtfile)
     sys.exit(2)
 
+## Now that we have some idea of the run contents, init the analysis handler
+ah.init()
+
 ## Event loop
 starttime = time.time()
 EVTNUM = 0
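
A note on the bin/rivet reordering above: ah.init() is now deferred until
after the Run has been initialised on the first event file, so the beam
IDs and sqrt(s) read from the first event reach the analysis handler
before the analyses' init() methods run. Roughly, the resulting control
flow is (an illustrative sketch only; the exact Run-initialisation call
is assumed):

    ah = rivet.AnalysisHandler(opts.HISTOFILE, RUNNAME)
    for a in opts.ANALYSES:
        ah.addAnalysis(a.upper())
    run = rivet.Run(ah)
    run.init(evtfile)  # reads the first event; passes beams and sqrt(s) to ah
    ah.init()          # analyses' init() can now call sqrtS() and beams()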

Modified: trunk/include/Rivet/Analysis.hh
==============================================================================
--- trunk/include/Rivet/Analysis.hh	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/include/Rivet/Analysis.hh	Fri Dec  4 01:26:26 2009	(r2132)
@@ -49,10 +49,12 @@
     /// The AnalysisHandler is a friend.
     friend class AnalysisHandler;
 
+
   public:
 
     /// @name Standard constructors and destructors.
     //@{
+
     /// The default constructor.
     //Analysis();
 
@@ -61,8 +63,10 @@
 
     /// The destructor.
     virtual ~Analysis();
+
     //@}
 
+
   public:
 
     /// @name Main analysis methods
@@ -84,8 +88,10 @@
     /// overridden function must make sure it first calls the base class
     /// function.
     virtual void finalize() = 0;
+
     //@}
 
+
   public:
 
     /// @name Metadata
@@ -140,8 +146,8 @@
     /// Collider on which the experiment ran.
     virtual std::string collider() const;
 
-    /// Incoming beams required by this analysis.
-    virtual const std::pair<ParticleName, ParticleName>& beams() const;
+    /// Return the pair of incoming beams required by this analysis.
+    virtual const BeamPair requiredBeams() const;
 
     /// Sets of valid beam energy pairs, in GeV
     virtual const std::vector<std::pair<double, double> >& energies() const;
@@ -161,10 +167,19 @@
     //@}
 
 
-  public:
+    /// @name Run conditions
+    //@{
 
-    /// Return the pair of incoming beams required by this analysis.
-    virtual const BeamPair requiredBeams() const;
+    /// Incoming beams for this run
+    const BeamPair& beams() const;
+
+    /// Centre of mass energy for this run
+    double sqrtS() const;
+
+    //@}
+
+
+  public:
 
     /// Is this analysis able to run on the supplied pair of beams?
     virtual bool isCompatible(const ParticleName& beam1, const ParticleName& beam2) const;
@@ -200,6 +214,10 @@
 
   protected:
 
+
+    /// Get a Log object based on the name() property of the calling analysis object.
+    Log& getLog() const;
+
     /// Get the process cross-section in pb. Throws if this hasn't been set.
     double crossSection() const;
  
@@ -207,9 +225,6 @@
     /// hasn't been set.
     double crossSectionPerEvent() const;
 
-    /// Get a Log object based on the name() property of the calling analysis object.
-    Log& getLog() const;
-
     /// Get the number of events seen (via the analysis handler). Use in the
     /// finalize phase only.
     size_t numEvents() const;
@@ -220,6 +235,7 @@
  
 
   protected:
+
     /// @name AIDA analysis infrastructure.
     //@{
     /// Access the AIDA analysis factory of the controlling AnalysisHandler object.
@@ -239,6 +255,7 @@
 
     /// Get the canonical histogram path for the named histogram in this analysis.
     const std::string histoPath(const std::string& hname) const;
+
     //@}
 
 
@@ -372,6 +389,7 @@
   protected:
 
     /// Set the colliding beam pair.
+    /// @deprecated Use .info file and AnalysisInfo class instead
     Analysis& setBeams(const ParticleName& beam1, const ParticleName& beam2);
 
     /// Declare whether this analysis needs to know the process cross-section from the generator.

Modified: trunk/include/Rivet/AnalysisHandler.hh
==============================================================================
--- trunk/include/Rivet/AnalysisHandler.hh	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/include/Rivet/AnalysisHandler.hh	Fri Dec  4 01:26:26 2009	(r2132)
@@ -103,6 +103,7 @@
 
     /// Set beam IDs for this run (as determined from first event)
     AnalysisHandler& setBeams(const BeamPair& beams) { 
+      getLog() << Log::DEBUG << "Setting run beams = " << beams << endl;
       _beams = beams;
       return *this;
     }
@@ -115,6 +116,7 @@
 
     /// Set energy for this run (as determined from first event)
     AnalysisHandler& setSqrtS(double sqrts) {
+      getLog() << Log::DEBUG << "Setting run sqrt(s) = " << sqrts << endl;
       _sqrts = sqrts;
       return *this;
     }

Modified: trunk/src/Analyses/MC_LHC_WANALYSIS.cc
==============================================================================
--- trunk/src/Analyses/MC_LHC_WANALYSIS.cc	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/src/Analyses/MC_LHC_WANALYSIS.cc	Fri Dec  4 01:26:26 2009	(r2132)
@@ -16,7 +16,6 @@
     MC_LHC_WANALYSIS() : Analysis("MC_LHC_WANALYSIS")
     {
       setNeedsCrossSection(true);
-      _sumWPass = 0.0;
     }
  
 
@@ -58,8 +57,8 @@
       _hist_wminusmass = bookHistogram1D("wminus-m", 40, 60, 100);
       //_hist_weta_asymm = bookProfile1D("asymm-eta-w", 20, -5.0, 5.0);
       //
-      // _hist_jetcount = bookHistogram1D("jet-n", 6, -0.5, 5.5);
-      // _hist_jetpt = bookHistogram1D("jet-pt", 50, 20, 100);
+      _hist_jetcount = bookHistogram1D("jet-n", 6, -0.5, 5.5);
+      _hist_jetpt = bookHistogram1D("jet-pt", 50, 20, 100);
     }
  
  
@@ -70,7 +69,6 @@
         vetoEvent;
       }
       const double weight = event.weight();
-      _sumWPass += weight;
  
       // Charged particles part
       const FinalState& cfs = applyProjection<FinalState>(event, "CFS");
@@ -120,15 +118,15 @@
       _hist_wpluscount->fill(n_wplus, weight);   
       _hist_wminuscount->fill(n_wminus, weight);
 
-      // // Jet part
-      // const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
-      // const Jets jets = fastjets.jetsByPt();
-      // cout << jets.size() << endl;
-      // _hist_jetcount->fill(jets.size(), weight);
-      // foreach (const Jet& j, jets) {
-      //   const double pT = j.momentum().pT();
-      //   _hist_jetpt->fill(pT/GeV, weight);
-      // }
+      // Jet part
+      const FastJets& fastjets = applyProjection<FastJets>(event, "Jets");
+      const Jets jets = fastjets.jetsByPt(10*GeV);
+      getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
+      _hist_jetcount->fill(jets.size(), weight);
+      foreach (const Jet& j, jets) {
+        const double pT = j.momentum().pT();
+        _hist_jetpt->fill(pT/GeV, weight);
+      }
 
     }
  
@@ -158,8 +156,8 @@
       scale(_hist_wmass, xsec_sumw);
       scale(_hist_wplusmass, xsec_sumw_plus);
       scale(_hist_wminusmass, xsec_sumw_minus);
-      // scale(_hist_jetcount, xsec_sumw);
-      // scale(_hist_jetpt, xsec_sumw);
+      scale(_hist_jetcount, xsec_sumw);
+      scale(_hist_jetpt, xsec_sumw);
     }
  
     //@}
@@ -167,13 +165,6 @@
  
   private:
 
-
-    /// @name Counters
-    //@{
-    double _sumWPass;
-    //@}
-
-
     /// @name Histograms
     //@{
     AIDA::IHistogram1D *_hist_chargemulti;
@@ -185,8 +176,8 @@
     AIDA::IHistogram1D *_hist_weta, *_hist_wpluseta, *_hist_wminuseta;
     AIDA::IHistogram1D *_hist_wphi, *_hist_wplusphi, *_hist_wminusphi;
     AIDA::IHistogram1D *_hist_wmass, *_hist_wplusmass, *_hist_wminusmass;
-    // AIDA::IHistogram1D *_hist_jetcount;
-    // AIDA::IHistogram1D *_hist_jetpt;
+    AIDA::IHistogram1D *_hist_jetcount;
+    AIDA::IHistogram1D *_hist_jetpt;
     //@}
 
   };

Modified: trunk/src/Analyses/OPAL_2004_S6132243.cc
==============================================================================
--- trunk/src/Analyses/OPAL_2004_S6132243.cc	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/src/Analyses/OPAL_2004_S6132243.cc	Fri Dec  4 01:26:26 2009	(r2132)
@@ -17,7 +17,9 @@
   public:
 
     /// Constructor
-    OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243") {
+    OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243"),
+                           _isqrts(-1), _sumPassedWeights(0.0)
+    {
       //
     }
 
@@ -25,6 +27,28 @@
     /// @name Analysis methods
     //@{
 
+    /// Energies: 91, 133, 177 (161-183), 197 (189-209) => index 0..3
+    int getHistIndex(double sqrts) {
+      int ih = -1;
+      if (inRange(sqrts/GeV, 89.9, 91.5)) {
+        ih = 0;
+      } else if (fuzzyEquals(sqrts/GeV, 133)) {
+        ih = 1;
+      } else if (fuzzyEquals(sqrts/GeV, 177)) { // (161-183)
+        ih = 2;
+      } else if (fuzzyEquals(sqrts/GeV, 197)) { // (189-209)
+        ih = 3;
+      } else {
+        stringstream ss;
+        ss << "Invalid energy for OPAL_2004 analysis: "
+           << sqrts/GeV << " GeV != 91, 133, 177, or 197 GeV";
+        throw Error(ss.str());
+      }
+      assert(ih >= 0);
+      return ih;
+    }
+
+
     void init() {
       // Projections
       addProjection(Beam(), "Beams");
@@ -37,80 +61,60 @@
       addProjection(thrust, "Thrust");
       addProjection(Hemispheres(thrust), "Hemispheres");
 
-      // Counters
-      _sumPassedWeights = 0;
+      // Get beam energy index
+      _isqrts = getHistIndex(sqrtS());
 
       // Book histograms
-      // Energies: 91, 133, 177 (161-183), 197 (189-209) => index 0..4
-      for (int isqrts = 0; isqrts < 4; ++isqrts) {
-        _hist1MinusT[isqrts]    = bookHistogram1D(1, 1, isqrts+1);
-        _histHemiMassH[isqrts]  = bookHistogram1D(2, 1, isqrts+1);
-        _histCParam[isqrts]     = bookHistogram1D(3, 1, isqrts+1);
-        _histHemiBroadT[isqrts] = bookHistogram1D(4, 1, isqrts+1);
-        _histHemiBroadW[isqrts] = bookHistogram1D(5, 1, isqrts+1);
-        _histY23Durham[isqrts]  = bookHistogram1D(6, 1, isqrts+1);
-        _histTMajor[isqrts]     = bookHistogram1D(7, 1, isqrts+1);
-        _histTMinor[isqrts]     = bookHistogram1D(8, 1, isqrts+1);
-        _histAplanarity[isqrts] = bookHistogram1D(9, 1, isqrts+1);
-        _histSphericity[isqrts] = bookHistogram1D(10, 1, isqrts+1);
-        _histOblateness[isqrts] = bookHistogram1D(11, 1, isqrts+1);
-        _histHemiMassL[isqrts]  = bookHistogram1D(12, 1, isqrts+1);
-        _histHemiBroadN[isqrts] = bookHistogram1D(13, 1, isqrts+1);
-        _histDParam[isqrts]     = bookHistogram1D(14, 1, isqrts+1);
-        //
-        _hist1MinusTMom[isqrts]    = bookHistogram1D(15, 1, isqrts+1);
-        _histHemiMassHMom[isqrts]  = bookHistogram1D(16, 1, isqrts+1);
-        _histCParamMom[isqrts]     = bookHistogram1D(17, 1, isqrts+1);
-        _histHemiBroadTMom[isqrts] = bookHistogram1D(18, 1, isqrts+1);
-        _histHemiBroadWMom[isqrts] = bookHistogram1D(19, 1, isqrts+1);
-        _histY23DurhamMom[isqrts]  = bookHistogram1D(20, 1, isqrts+1);
-        _histTMajorMom[isqrts]     = bookHistogram1D(21, 1, isqrts+1);
-        _histTMinorMom[isqrts]     = bookHistogram1D(22, 1, isqrts+1);
-        _histSphericityMom[isqrts] = bookHistogram1D(23, 1, isqrts+1);
-        _histOblatenessMom[isqrts] = bookHistogram1D(24, 1, isqrts+1);
-        _histHemiMassLMom[isqrts]  = bookHistogram1D(25, 1, isqrts+1);
-        _histHemiBroadNMom[isqrts] = bookHistogram1D(26, 1, isqrts+1);
-      }
+      _hist1MinusT[_isqrts]    = bookHistogram1D(1, 1, _isqrts+1);
+      _histHemiMassH[_isqrts]  = bookHistogram1D(2, 1, _isqrts+1);
+      _histCParam[_isqrts]     = bookHistogram1D(3, 1, _isqrts+1);
+      _histHemiBroadT[_isqrts] = bookHistogram1D(4, 1, _isqrts+1);
+      _histHemiBroadW[_isqrts] = bookHistogram1D(5, 1, _isqrts+1);
+      _histY23Durham[_isqrts]  = bookHistogram1D(6, 1, _isqrts+1);
+      _histTMajor[_isqrts]     = bookHistogram1D(7, 1, _isqrts+1);
+      _histTMinor[_isqrts]     = bookHistogram1D(8, 1, _isqrts+1);
+      _histAplanarity[_isqrts] = bookHistogram1D(9, 1, _isqrts+1);
+      _histSphericity[_isqrts] = bookHistogram1D(10, 1, _isqrts+1);
+      _histOblateness[_isqrts] = bookHistogram1D(11, 1, _isqrts+1);
+      _histHemiMassL[_isqrts]  = bookHistogram1D(12, 1, _isqrts+1);
+      _histHemiBroadN[_isqrts] = bookHistogram1D(13, 1, _isqrts+1);
+      _histDParam[_isqrts]     = bookHistogram1D(14, 1, _isqrts+1);
+      //
+      _hist1MinusTMom[_isqrts]    = bookHistogram1D(15, 1, _isqrts+1);
+      _histHemiMassHMom[_isqrts]  = bookHistogram1D(16, 1, _isqrts+1);
+      _histCParamMom[_isqrts]     = bookHistogram1D(17, 1, _isqrts+1);
+      _histHemiBroadTMom[_isqrts] = bookHistogram1D(18, 1, _isqrts+1);
+      _histHemiBroadWMom[_isqrts] = bookHistogram1D(19, 1, _isqrts+1);
+      _histY23DurhamMom[_isqrts]  = bookHistogram1D(20, 1, _isqrts+1);
+      _histTMajorMom[_isqrts]     = bookHistogram1D(21, 1, _isqrts+1);
+      _histTMinorMom[_isqrts]     = bookHistogram1D(22, 1, _isqrts+1);
+      _histSphericityMom[_isqrts] = bookHistogram1D(23, 1, _isqrts+1);
+      _histOblatenessMom[_isqrts] = bookHistogram1D(24, 1, _isqrts+1);
+      _histHemiMassLMom[_isqrts]  = bookHistogram1D(25, 1, _isqrts+1);
+      _histHemiBroadNMom[_isqrts] = bookHistogram1D(26, 1, _isqrts+1);
     }
 
 
     void analyze(const Event& event) {
-      const FinalState& cfs = applyProjection<FinalState>(event, "FS");
       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
+      const FinalState& cfs = applyProjection<FinalState>(event, "FS");
       if (cfs.size() < 2) vetoEvent;
 
       // Increment passed-cuts weight sum
       const double weight = event.weight();
       _sumPassedWeights += weight;
 
-      // Get beams and average beam momentum
-      const double sqrts = applyProjection<Beam>(event, "Beams").sqrtS();
-
-      // Translate sqrt(s) into a histo index for the majority of histos
-      size_t ih = 5;
-      if (inRange(sqrts/GeV, 89.9, 91.5)) {
-        ih = 0;
-      } else if (fuzzyEquals(sqrts/GeV, 133)) {
-        ih = 1;
-      } else if (fuzzyEquals(sqrts/GeV, 177)) { // (161-183)
-        ih = 2;
-      } else if (fuzzyEquals(sqrts/GeV, 197)) { // (189-209)
-        ih = 3;
-      } else {
-        throw Error("Invalid energy for OPAL_2004 analysis! Require 91, 133, 177, or 197 GeV");
-      }
-
       // Thrusts
       const Thrust& thrust = applyProjection<Thrust>(event, "Thrust");
-      _hist1MinusT[ih]->fill(1-thrust.thrust(), weight);
-      _histTMajor[ih]->fill(thrust.thrustMajor(), weight);
-      _histTMinor[ih]->fill(thrust.thrustMinor(), weight);
-      _histOblateness[ih]->fill(thrust.oblateness(), weight);
+      _hist1MinusT[_isqrts]->fill(1-thrust.thrust(), weight);
+      _histTMajor[_isqrts]->fill(thrust.thrustMajor(), weight);
+      _histTMinor[_isqrts]->fill(thrust.thrustMinor(), weight);
+      _histOblateness[_isqrts]->fill(thrust.oblateness(), weight);
       for (int n = 1; n <= 5; ++n) {
-        _hist1MinusTMom[ih]->fill(n, pow(1-thrust.thrust(), n)*weight);
-        _histTMajorMom[ih]->fill(n, pow(thrust.thrustMajor(), n)*weight);
-        _histTMinorMom[ih]->fill(n, pow(thrust.thrustMinor(), n)*weight);
-        _histOblatenessMom[ih]->fill(n, pow(thrust.oblateness(), n)*weight);
+        _hist1MinusTMom[_isqrts]->fill(n, pow(1-thrust.thrust(), n)*weight);
+        _histTMajorMom[_isqrts]->fill(n, pow(thrust.thrustMajor(), n)*weight);
+        _histTMinorMom[_isqrts]->fill(n, pow(thrust.thrustMinor(), n)*weight);
+        _histOblatenessMom[_isqrts]->fill(n, pow(thrust.oblateness(), n)*weight);
       }
 
       // Jets
@@ -118,9 +122,9 @@
       if (durjet.clusterSeq()) {
         /// @todo Need separate normalisation due to clusterseq / 3 jet requirement?
         const double y23 = durjet.clusterSeq()->exclusive_ymerge(3);
-        _histY23Durham[ih]->fill(y23, weight);
+        _histY23Durham[_isqrts]->fill(y23, weight);
         for (int n = 1; n <= 5; ++n) {
-          _histY23DurhamMom[ih]->fill(n, pow(y23, n)*weight);
+          _histY23DurhamMom[_isqrts]->fill(n, pow(y23, n)*weight);
         }
       }
 
@@ -128,20 +132,20 @@
       const Sphericity& sphericity = applyProjection<Sphericity>(event, "Sphericity");
       const double sph = sphericity.sphericity();
       const double apl = sphericity.aplanarity();
-      _histSphericity[ih]->fill(sph, weight);
-      _histAplanarity[ih]->fill(apl, weight);
+      _histSphericity[_isqrts]->fill(sph, weight);
+      _histAplanarity[_isqrts]->fill(apl, weight);
       for (int n = 1; n <= 5; ++n) {
-        _histSphericityMom[ih]->fill(n, pow(sph, n)*weight);
+        _histSphericityMom[_isqrts]->fill(n, pow(sph, n)*weight);
       }
 
       // C & D params
       const ParisiTensor& parisi = applyProjection<ParisiTensor>(event, "Parisi");
       const double cparam = parisi.C();
       const double dparam = parisi.D();
-      _histCParam[ih]->fill(cparam, weight);
-      _histDParam[ih]->fill(dparam, weight);
+      _histCParam[_isqrts]->fill(cparam, weight);
+      _histDParam[_isqrts]->fill(dparam, weight);
       for (int n = 1; n <= 5; ++n) {
-        _histCParamMom[ih]->fill(n, pow(cparam, n)*weight);
+        _histCParamMom[_isqrts]->fill(n, pow(cparam, n)*weight);
       }
    
       // Hemispheres
@@ -151,59 +155,59 @@
       const double hemi_bmax = hemi.Bmax();
       const double hemi_bmin = hemi.Bmin();
       const double hemi_bsum = hemi.Bsum();
-      _histHemiMassH[ih]->fill(hemi_mh, weight);
-      _histHemiMassL[ih]->fill(hemi_ml, weight);
-      _histHemiBroadW[ih]->fill(hemi_bmax, weight);
-      _histHemiBroadN[ih]->fill(hemi_bmin, weight);
-      _histHemiBroadT[ih]->fill(hemi_bsum, weight);
+      _histHemiMassH[_isqrts]->fill(hemi_mh, weight);
+      _histHemiMassL[_isqrts]->fill(hemi_ml, weight);
+      _histHemiBroadW[_isqrts]->fill(hemi_bmax, weight);
+      _histHemiBroadN[_isqrts]->fill(hemi_bmin, weight);
+      _histHemiBroadT[_isqrts]->fill(hemi_bsum, weight);
       for (int n = 1; n <= 5; ++n) {
-        _histHemiMassHMom[ih]->fill(n, pow(hemi_mh, n)*weight);
-        _histHemiMassLMom[ih]->fill(n, pow(hemi_ml, n)*weight);
-        _histHemiBroadWMom[ih]->fill(n, pow(hemi_bmax, n)*weight);
-        _histHemiBroadNMom[ih]->fill(n, pow(hemi_bmin, n)*weight);
-        _histHemiBroadTMom[ih]->fill(n, pow(hemi_bsum, n)*weight);
+        _histHemiMassHMom[_isqrts]->fill(n, pow(hemi_mh, n)*weight);
+        _histHemiMassLMom[_isqrts]->fill(n, pow(hemi_ml, n)*weight);
+        _histHemiBroadWMom[_isqrts]->fill(n, pow(hemi_bmax, n)*weight);
+        _histHemiBroadNMom[_isqrts]->fill(n, pow(hemi_bmin, n)*weight);
+        _histHemiBroadTMom[_isqrts]->fill(n, pow(hemi_bsum, n)*weight);
       }
     }
 
 
     void finalize() {
-      /// @todo Normalisations / scalings, etc.
-      for (int isqrts = 0; isqrts < 4; ++isqrts) {
-        normalize(_hist1MinusT[isqrts]);
-        normalize(_histTMajor[isqrts]);
-        normalize(_histTMinor[isqrts]);
-        normalize(_histOblateness[isqrts]);
-        normalize(_histSphericity[isqrts]);
-        normalize(_histAplanarity[isqrts]);
-        normalize(_histHemiMassH[isqrts]);
-        normalize(_histHemiMassL[isqrts]);
-        normalize(_histHemiBroadW[isqrts]);
-        normalize(_histHemiBroadN[isqrts]);
-        normalize(_histHemiBroadT[isqrts]);
-        normalize(_histCParam[isqrts]);
-        normalize(_histDParam[isqrts]);
-        normalize(_histY23Durham[isqrts]);
-        //
-        scale(_hist1MinusTMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histTMajorMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histTMinorMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histOblatenessMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histSphericityMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histHemiMassHMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histHemiMassLMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histHemiBroadWMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histHemiBroadNMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histHemiBroadTMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histCParamMom[isqrts], 1.0/_sumPassedWeights);
-        scale(_histY23DurhamMom[isqrts], 1.0/_sumPassedWeights);
-      }
+      normalize(_hist1MinusT[_isqrts]);
+      normalize(_histTMajor[_isqrts]);
+      normalize(_histTMinor[_isqrts]);
+      normalize(_histOblateness[_isqrts]);
+      normalize(_histSphericity[_isqrts]);
+      normalize(_histAplanarity[_isqrts]);
+      normalize(_histHemiMassH[_isqrts]);
+      normalize(_histHemiMassL[_isqrts]);
+      normalize(_histHemiBroadW[_isqrts]);
+      normalize(_histHemiBroadN[_isqrts]);
+      normalize(_histHemiBroadT[_isqrts]);
+      normalize(_histCParam[_isqrts]);
+      normalize(_histDParam[_isqrts]);
+      normalize(_histY23Durham[_isqrts]);
+      //
+      scale(_hist1MinusTMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histTMajorMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histTMinorMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histOblatenessMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histSphericityMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histHemiMassHMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histHemiMassLMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histHemiBroadWMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histHemiBroadNMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histHemiBroadTMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histCParamMom[_isqrts], 1.0/_sumPassedWeights);
+      scale(_histY23DurhamMom[_isqrts], 1.0/_sumPassedWeights);
     }
-
+    
     //@}
-
-
+    
+    
   private:
-
+    
+    // Beam energy index for histograms
+    int _isqrts;
+    
     // Counter of event weights passing the cuts
     double _sumPassedWeights;
 

Modified: trunk/src/Core/Analysis.cc
==============================================================================
--- trunk/src/Core/Analysis.cc	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/src/Core/Analysis.cc	Fri Dec  4 01:26:26 2009	(r2132)
@@ -52,6 +52,15 @@
   }
 
 
+  double Analysis::sqrtS() const {
+    return handler().sqrtS();
+  }
+
+  const BeamPair& Analysis::beams() const {
+    return handler().beams();
+  }
+
+
   const string Analysis::histoDir() const {
     string path = "/" + name();
     if (handler().runName().length() > 0) {
@@ -124,10 +133,6 @@
     return _info->runInfo();
   }
 
-  const std::pair<ParticleName,ParticleName>& Analysis::beams() const {
-    return info().beams();
-  }
-
   const std::vector<std::pair<double,double> >& Analysis::energies() const {
     return info().energies();
   }
@@ -167,6 +172,7 @@
     return *this;
   }
 
+
   bool Analysis::isCompatible(const ParticleName& beam1, const ParticleName& beam2) const {
     BeamPair beams(beam1, beam2);
     return compatible(beams, requiredBeams());
@@ -180,6 +186,7 @@
     /// beam requirements with those of the projections it uses.
   }
 
+
   Analysis& Analysis::setCrossSection(const double& xs) {
     _crossSection = xs;
     _gotCrossSection = true;
@@ -209,6 +216,7 @@
     return _crossSection / sumW;
   }
 
+
   AnalysisHandler& Analysis::handler() const {
     return *_analysishandler;
   }

Modified: trunk/src/Core/AnalysisHandler.cc
==============================================================================
--- trunk/src/Core/AnalysisHandler.cc	Thu Dec  3 16:14:36 2009	(r2131)
+++ trunk/src/Core/AnalysisHandler.cc	Fri Dec  4 01:26:26 2009	(r2132)
@@ -13,7 +13,8 @@
   AnalysisHandler::AnalysisHandler(string basefilename,
                                    string runname, HistoFormat storetype)
     : _runname(runname), _nRun(0), _iRun(0), _numEvents(0), 
-      _sumOfWeights(0.0), _xs(-1.0) 
+      _sumOfWeights(0.0), _xs(-1.0), 
+      _beams(BeamPair(ANY,ANY)), _sqrts(-1.0)
   {
     _theAnalysisFactory = createAnalysisFactory();
     _setupFactories(basefilename, storetype);
@@ -24,6 +25,7 @@
                                    string runname, HistoFormat storetype)
     : _runname(runname), _nRun(0), _iRun(0), _numEvents(0), 
       _sumOfWeights(0.0), _xs(-1.0),
+      _beams(BeamPair(ANY,ANY)), _sqrts(-1.0),
       _theAnalysisFactory(&afac) 
   {
     _setupFactories(basefilename, storetype);

