[Rivet-svn] r3517 - in branches/2011-07-aida2yoda: include/Rivet src/Core src/Tools

blackhole at projects.hepforge.org
Wed Dec 7 12:02:30 GMT 2011


Author: buckley
Date: Wed Dec  7 12:02:30 2011
New Revision: 3517

Log:
* Use the YODA data type names natively in Rivet, as a precursor to making
wrappers for them to handle stats for NLO correlated events and multi-weight
events.

* Make the integral(Histo1D*) function just use the YODA Histo1D::integral()
function.

* Simplify the Analysis::normalize and Analysis::scale methods so they no longer
attempt conversion to scatters (see the usage sketch below).
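
A hypothetical usage sketch of the updated interface (not part of this commit:
the analysis class and histogram member names are made up, and the usual
Analysis::crossSection()/sumOfWeights() helpers are assumed; only the
normalize/scale signatures come from this revision). The point is that the
Histo1D objects now remain valid Histo1Ds after scaling or normalising:

    // Sketch only: hypothetical finalize() using the new signatures.
    void MY_TEST_ANALYSIS::finalize() {
      // Histograms are scaled in place: no conversion to Scatter2D,
      // and the pointers are not nulled afterwards.
      scale(_h_pT, crossSection()/sumOfWeights());
      normalize(_h_eta, 1.0);            // includeoverflows defaults to true
      normalize(_h_mass, 2.0, false);    // normalise over the visible bins only
    }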

Modified:
   branches/2011-07-aida2yoda/include/Rivet/Analysis.hh
   branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh
   branches/2011-07-aida2yoda/src/Core/Analysis.cc
   branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc

Modified: branches/2011-07-aida2yoda/include/Rivet/Analysis.hh
==============================================================================
--- branches/2011-07-aida2yoda/include/Rivet/Analysis.hh	Tue Dec  6 16:26:07 2011	(r3516)
+++ branches/2011-07-aida2yoda/include/Rivet/Analysis.hh	Wed Dec  7 12:02:30 2011	(r3517)
@@ -288,37 +288,25 @@
     /// Access the controlling AnalysisHandler object.
     AnalysisHandler& handler() const { return *_analysishandler; }
 
-    /// Normalize the given histogram, @a histo. After this call the
-    /// histogram will have been transformed to a Scatter2D with the
-    /// same name and path. It has the same effect as
-    /// @c scale(histo, norm/sumOfWeights).
-    /// @param histo The histogram to be normalised.
-    /// @param norm The new area of the histogram.
-    /// @warning The old histogram will be deleted, and its pointer set to zero.
-    void normalize(Histo1DPtr histo, double norm=1.0);
-
-    /// Multiplicatively scale the given histogram, @a histo. After this call the
-    /// histogram will have been transformed to a Scatter2D with the same name and path.
-    /// @param histo The histogram to be scaled.
-    /// @param scale The factor used to multiply the histogram bin heights.
-    /// @warning The old histogram will be deleted, and its pointer set to zero.
+    /// Normalize the given histogram, @a histo, to area = @a norm.
+    ///
+    /// NB. The histogram is no longer invalidated by this procedure.
+    void normalize(Histo1DPtr histo, double norm=1.0, bool includeoverflows=true);
+
+    /// Multiplicatively scale the given histogram, @a histo, by factor @a scale.
+    ///
+    /// NB. The histogram is no longer invalidated by this procedure.
     void scale(Histo1DPtr histo, double scale);
 
-    /// Normalize the given histogram, @a histo. After this call the
-    /// histogram will have been transformed to a Scatter2D with the
-    /// same name and path. It has the same effect as
-    /// @c scale(histo, norm/sumOfWeights).
-    /// @param histo The histogram to be normalised.
-    /// @param norm The new area of the histogram.
-    /// @warning The old histogram will be deleted, and its pointer set to zero.
-    // void normalize(AIDA::IHistogram2D*& histo, double norm=1.0);
-
-    /// Multiplicatively scale the given histogram, @a histo. After this call the
-    /// histogram will have been transformed to a Scatter2D with the same name and path.
-    /// @param histo The histogram to be scaled.
-    /// @param scale The factor used to multiply the histogram bin heights.
-    /// @warning The old histogram will be deleted, and its pointer set to zero.
-    // void scale(AIDA::IHistogram2D*& histo, double scale);
+    /// Normalize the given histogram, @a histo, to area = @a norm.
+    ///
+    /// NB. The histogram is no longer invalidated by this procedure.
+    // void normalize(Histo2DPtr histo, double norm=1.0);
+
+    /// Multiplicatively scale the given histogram, @a histo, by factor @a scale.
+    ///
+    /// NB. The histogram is no longer invalidated by this procedure.
+    // void scale(Histo2DPtr histo, double scale);
 
     /// Set the cross section from the generator
     Analysis& setCrossSection(double xs);

Modified: branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh
==============================================================================
--- branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh	Tue Dec  6 16:26:07 2011	(r3516)
+++ branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh	Wed Dec  7 12:02:30 2011	(r3517)
@@ -21,25 +21,22 @@
 
   using YODA::WriterYODA;
   using YODA::ReaderAIDA;
-  using YODA::Histo1D;
-  using YODA::Profile1D;
-  using YODA::Scatter2D;
-  using YODA::Point2D;
+
+  typedef YODA::Histo1D Histo1D;
+  typedef YODA::Profile1D Profile1D;
+  typedef YODA::Scatter2D Scatter2D;
+  typedef YODA::Point2D Point2D;
 
   /// Function to get a map of all the refdata in a paper with the
   /// given @a papername.
-  RefDataMap getRefData(string papername);
+  RefDataMap getRefData(const string& papername);
 
   /// Get the file system path to the AIDA reference file for this paper.
-  string getDatafilePath(string papername);
+  string getDatafilePath(const string& papername);
 
   /// Return the integral over the histogram bins
   inline double integral(Histo1DPtr histo) {
-    double intg = 0.;
-    for ( size_t i = 0; i < histo->numBins(); ++i ) {
-      intg += histo->bin(i).area();
-    }
-    return intg;
+    return histo->integral();
   }
 
 }

Modified: branches/2011-07-aida2yoda/src/Core/Analysis.cc
==============================================================================
--- branches/2011-07-aida2yoda/src/Core/Analysis.cc	Tue Dec  6 16:26:07 2011	(r3516)
+++ branches/2011-07-aida2yoda/src/Core/Analysis.cc	Wed Dec  7 12:02:30 2011	(r3517)
@@ -394,153 +394,49 @@
   // }
 
 
-  void Analysis::normalize(Histo1DPtr histo, double norm) {
+  void Analysis::normalize(Histo1DPtr histo, double norm, bool includeoverflows) {
     if (!histo) {
-      MSG_ERROR("Failed to normalize histo=NULL in analysis "
-                << name() << " (norm=" << norm << ")");
+      MSG_ERROR("Failed to normalize histo=NULL in analysis " << name() << " (norm=" << norm << ")");
       return;
     }
-    const string hpath = histo->path();
-    MSG_TRACE("Normalizing histo " << hpath << " to " << norm);
-
-    double oldintg = 0.0;
-    int nBins = histo->numBins();
-    for (int iBin = 0; iBin != nBins; ++iBin) {
-      oldintg += histo->bin(iBin).area();
-    }
-    if (oldintg == 0.0) {
-      MSG_WARNING("Histo " << hpath << " has null integral during normalization");
+    MSG_TRACE("Normalizing histo " << histo->path() << " to " << norm);
+    try {
+      histo->normalize(norm, includeoverflows);
+    } catch (YODA::WeightError& we) {
+      MSG_WARNING("Could not normalize histo " << histo->path());
       return;
     }
-
-    // Scale by the normalisation factor.
-    scale(histo, norm/oldintg);
   }
 
 
   void Analysis::scale(Histo1DPtr histo, double scale) {
     if (!histo) {
-      MSG_ERROR("Failed to scale histo=NULL in analysis "
-                << name() << " (scale=" << scale << ")");
+      MSG_ERROR("Failed to scale histo=NULL in analysis " << name() << " (scale=" << scale << ")");
       return;
     }
-    const string hpath = histo->path();
-    MSG_TRACE("Scaling histo " << hpath);
-
-    vector<double> x, y, ex, ey;
-    for (size_t i = 0, N = histo->numBins(); i < N; ++i) {
-      x.push_back( histo->bin(i).midpoint() );
-      ex.push_back(histo->bin(i).width()*0.5);
-
-      // We'd like to do this: y.push_back(histo->binHeight(i) * scale);
-      y.push_back(histo->bin(i).height()*scale);
-
-      // We'd like to do this: ey.push_back(histo->binError(i) * scale);
-      ey.push_back(histo->bin(i).heightErr()*scale);
+    MSG_TRACE("Scaling histo " << histo->path() << "by factor " << scale);
+    try {
+      histo->scaleW(scale);
+    } catch (YODA::WeightError& we) {
+      MSG_WARNING("Could not normalize histo " << histo->path());
+      return;
     }
-
-    string title = histo->title();
-    // string xtitle = histo->xtitle();
-    // string ytitle = histo->ytitle();
-
-
-    // \todo YODA
-
-    // tree().mkdir("/tmpnormalize");
-    // tree().mv(hpath, "/tmpnormalize");
-
-    Scatter2DPtr dps( new Scatter2D(x, y, ex, ey, hpath, title) );
-    addPlot(dps);
-
-    // dps->setXTitle(xtitle);
-    // dps->setYTitle(ytitle);
-
-    // tree().rm(tree().findPath(dynamic_cast<AIDA::IManagedObject&>(*histo)));
-    // tree().rmdir("/tmpnormalize");
-
-    // // Set histo pointer to null - it can no longer be used.
-    // histo = 0;
+    // // Transforming the histo into a scatter after scaling
+    // vector<double> x, y, ex, ey;
+    // for (size_t i = 0, N = histo->numBins(); i < N; ++i) {
+    //   x.push_back( histo->bin(i).midpoint() );
+    //   ex.push_back(histo->bin(i).width()*0.5);
+    //   y.push_back(histo->bin(i).height()*scale);
+    //   ey.push_back(histo->bin(i).heightErr()*scale);
+    // }
+    // string title = histo->title();
+    // Scatter2DPtr dps( new Scatter2D(x, y, ex, ey, hpath, title) );
+    // addPlot(dps);
   }
 
 
-  // void Analysis::normalize(AIDA::IHistogram2D*& histo, double norm) {
-  //   if (!histo) {
-  //     MSG_ERROR("Failed to normalize histo=NULL in analysis "
-  //               << name() << " (norm=" << norm << ")");
-  //     return;
-  //   }
-  //   const string hpath = tree().findPath(dynamic_cast<const AIDA::IManagedObject&>(*histo));
-  //   MSG_TRACE("Normalizing histo " << hpath << " to " << norm);
-
-  //   double oldintg = 0.0;
-  //   int nxBins = histo->xAxis().bins();
-  //   int nyBins = histo->yAxis().bins();
-  //   for (int ixBin = 0; ixBin != nxBins; ++ixBin)
-  //     for (int iyBin = 0; iyBin != nyBins; ++iyBin) {
-  //     // Leaving out factor of binWidth because AIDA's "height"
-  //     // already includes a width factor.
-  // 	oldintg += histo->binHeight(ixBin, iyBin); // * histo->axis().binWidth(iBin);
-  //   }
-  //   if (oldintg == 0.0) {
-  //     MSG_WARNING("Histo " << hpath << " has null integral during normalization");
-  //     return;
-  //   }
-
-  //   // Scale by the normalisation factor.
-  //   scale(histo, norm/oldintg);
-  // }
-
-
-  // void Analysis::scale(AIDA::IHistogram2D*& histo, double scale) {
-  //   if (!histo) {
-  //     MSG_ERROR("Failed to scale histo=NULL in analysis "
-  //               << name() << " (scale=" << scale << ")");
-  //     return;
-  //   }
-  //   const string hpath =
-  //     tree().findPath(dynamic_cast<const AIDA::IManagedObject&>(*histo));
-  //   MSG_TRACE("Scaling histo " << hpath);
-
-  //   vector<double> x, y, z, ex, ey, ez;
-  //   for (size_t ix = 0, Nx = histo->xAxis().bins(); ix < Nx; ++ix)
-  //     for (size_t iy = 0, Ny = histo->yAxis().bins(); iy < Ny; ++iy) {
-  // 	x.push_back(0.5 * (histo->xAxis().binLowerEdge(ix) +
-  // 			   histo->xAxis().binUpperEdge(ix)));
-  // 	ex.push_back(histo->xAxis().binWidth(ix)*0.5);
-  // 	y.push_back(0.5 * (histo->yAxis().binLowerEdge(iy) +
-  // 			   histo->yAxis().binUpperEdge(iy)));
-  // 	ey.push_back(histo->yAxis().binWidth(iy)*0.5);
-
-  // 	// "Bin height" is a misnomer in the AIDA spec: width is neglected.
-  // 	// We'd like to do this: y.push_back(histo->binHeight(i) * scale);
-  // 	z.push_back(histo->binHeight(ix, iy)*scale/
-  // 		    (histo->xAxis().binWidth(ix)*histo->yAxis().binWidth(iy)));
-  // 	// "Bin error" is a misnomer in the AIDA spec: width is neglected.
-  // 	// We'd like to do this: ey.push_back(histo->binError(i) * scale);
-  // 	ez.push_back(histo->binError(ix, iy)*scale/
-  // 		     (histo->xAxis().binWidth(ix)*histo->yAxis().binWidth(iy)));
-  //   }
-
-  //   string title = histo->title();
-  //   string xtitle = histo->xtitle();
-  //   string ytitle = histo->ytitle();
-  //   string ztitle = histo->ztitle();
-
-  //   tree().mkdir("/tmpnormalize");
-  //   tree().mv(hpath, "/tmpnormalize");
-
-  //   Scatter2DPtr dps =
-  //     datapointsetFactory().createXYZ(hpath, title, x, y, z, ex, ey, ez);
-  //   dps->setXTitle(xtitle);
-  //   dps->setYTitle(ytitle);
-  //   dps->setZTitle(ztitle);
+  /// @todo 2D versions of scale and normalize... or ditch these completely?
 
-  //   tree().rm(tree().findPath(dynamic_cast<AIDA::IManagedObject&>(*histo)));
-  //   tree().rmdir("/tmpnormalize");
-
-  //   // Set histo pointer to null - it can no longer be used.
-  //   histo = 0;
-  // }
 
   void Analysis::addPlot(AnalysisObjectPtr ao) {
     _plotobjects.push_back(ao);

Modified: branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc
==============================================================================
--- branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc	Tue Dec  6 16:26:07 2011	(r3516)
+++ branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc	Wed Dec  7 12:02:30 2011	(r3517)
@@ -6,7 +6,8 @@
 
 namespace Rivet {
 
-  string getDatafilePath(string papername) {
+
+  string getDatafilePath(const string& papername) {
     const string path =  findAnalysisRefFile(papername + ".aida");
     if (!path.empty()) return path;
     throw Rivet::Error("Couldn't find ref data file '" + papername + ".aida" +
@@ -14,27 +15,29 @@
     return "";
   }
 
-  RefDataMap getRefData(string papername) {
+
+  RefDataMap getRefData(const string& papername) {
     // Get filename
     const string xmlfile = getDatafilePath(papername);
 
     YODA::Reader & reader =  ReaderAIDA::create();
     vector<YODA::AnalysisObject *> aovec;
     reader.read(xmlfile, aovec);
+    /// @todo Remove debug cerr
     cerr << "HERE2 " << aovec.size() << '\n';
 
     // Return value, to be populated
     RefDataMap rtn;
-
     foreach ( YODA::AnalysisObject * ao, aovec ) {
       Scatter2DPtr refdata( dynamic_cast<Scatter2D *>(ao) );
-      if ( ! refdata ) 
-	continue;
+      if (!refdata) continue;
       string plotpath = refdata->path();
+      /// @todo Remove debug cerr
       cerr << plotpath << '\n';
       rtn[plotpath] = refdata;
     }
     return rtn;
   }
 
+
 }

