|
[Rivet-svn] r3209 - in branches/2011-07-aida2yoda: include/Rivet include/Rivet/Analyses src src/Analyses src/Core src/Tools
blackhole at projects.hepforge.org
blackhole at projects.hepforge.org
Tue Jul 19 16:22:40 BST 2011
Author: dgrell Date: Tue Jul 19 16:22:39 2011 New Revision: 3209 Log: DataPointSet converted. Caching not yet done. Modified: branches/2011-07-aida2yoda/include/Rivet/Analyses/MC_JetAnalysis.hh branches/2011-07-aida2yoda/include/Rivet/Analysis.hh branches/2011-07-aida2yoda/include/Rivet/RivetYODA.fhh branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh branches/2011-07-aida2yoda/src/Analyses/MC_GENERIC.cc branches/2011-07-aida2yoda/src/Analyses/MC_JetAnalysis.cc branches/2011-07-aida2yoda/src/Analyses/MC_WJETS.cc branches/2011-07-aida2yoda/src/Analyses/MC_WPOL.cc branches/2011-07-aida2yoda/src/Analyses/Makefile.am branches/2011-07-aida2yoda/src/Core/Analysis.cc branches/2011-07-aida2yoda/src/Makefile.am branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc Modified: branches/2011-07-aida2yoda/include/Rivet/Analyses/MC_JetAnalysis.hh ============================================================================== --- branches/2011-07-aida2yoda/include/Rivet/Analyses/MC_JetAnalysis.hh Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/include/Rivet/Analyses/MC_JetAnalysis.hh Tue Jul 19 16:22:39 2011 (r3209) @@ -4,7 +4,6 @@ #include "Rivet/Analysis.hh" #include "Rivet/Projections/FinalState.hh" -#include "LWH/Histogram1D.h" namespace Rivet { @@ -45,18 +44,18 @@ /// @name Histograms //@{ std::vector<Histo1DPtr> _h_log10_d; - std::vector<AIDA::IDataPointSet *> _h_log10_R; + std::vector<Scatter2DPtr> _h_log10_R; std::vector<Histo1DPtr> _h_pT_jet; std::vector<Histo1DPtr> _h_eta_jet; - std::vector<shared_ptr<LWH::Histogram1D> > _h_eta_jet_plus, _h_eta_jet_minus; + std::vector<Histo1DPtr> _h_eta_jet_plus, _h_eta_jet_minus; std::vector<Histo1DPtr> _h_rap_jet; - std::vector<shared_ptr<LWH::Histogram1D> > _h_rap_jet_plus, _h_rap_jet_minus; + std::vector<Histo1DPtr> _h_rap_jet_plus, _h_rap_jet_minus; std::vector<Histo1DPtr> _h_mass_jet; std::map<std::pair<size_t, size_t>, Histo1DPtr> _h_deta_jets; std::map<std::pair<size_t, size_t>, Histo1DPtr> _h_dR_jets; Histo1DPtr 
_h_jet_multi_exclusive; Histo1DPtr _h_jet_multi_inclusive; - AIDA::IDataPointSet * _h_jet_multi_ratio; + Scatter2DPtr _h_jet_multi_ratio; Histo1DPtr _h_jet_HT; //@} Modified: branches/2011-07-aida2yoda/include/Rivet/Analysis.hh ============================================================================== --- branches/2011-07-aida2yoda/include/Rivet/Analysis.hh Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/include/Rivet/Analysis.hh Tue Jul 19 16:22:39 2011 (r3209) @@ -285,7 +285,7 @@ AnalysisHandler& handler() const { return *_analysishandler; } /// Normalize the given histogram, @a histo. After this call the - /// histogram will have been transformed to a DataPointSet with the + /// histogram will have been transformed to a Scatter2D with the /// same name and path. It has the same effect as /// @c scale(histo, norm/sumOfWeights). /// @param histo The histogram to be normalised. @@ -294,14 +294,14 @@ void normalize(Histo1DPtr histo, double norm=1.0); /// Multiplicatively scale the given histogram, @a histo. After this call the - /// histogram will have been transformed to a DataPointSet with the same name and path. + /// histogram will have been transformed to a Scatter2D with the same name and path. /// @param histo The histogram to be scaled. /// @param scale The factor used to multiply the histogram bin heights. /// @warning The old histogram will be deleted, and its pointer set to zero. void scale(Histo1DPtr histo, double scale); /// Normalize the given histogram, @a histo. After this call the - /// histogram will have been transformed to a DataPointSet with the + /// histogram will have been transformed to a Scatter2D with the /// same name and path. It has the same effect as /// @c scale(histo, norm/sumOfWeights). /// @param histo The histogram to be normalised. @@ -310,7 +310,7 @@ // void normalize(AIDA::IHistogram2D*& histo, double norm=1.0); /// Multiplicatively scale the given histogram, @a histo. 
After this call the - /// histogram will have been transformed to a DataPointSet with the same name and path. + /// histogram will have been transformed to a Scatter2D with the same name and path. /// @param histo The histogram to be scaled. /// @param scale The factor used to multiply the histogram bin heights. /// @warning The old histogram will be deleted, and its pointer set to zero. @@ -468,35 +468,35 @@ /// @name Internal data point set booking (for use by Analysis sub-classes). //@{ - // /// Book a 2-dimensional data point set. - // /// (NB. this returns a pointer rather than a reference since it will - // /// have to be stored in the analysis class - there's no point in forcing users to explicitly - // /// get the pointer from a reference before they can use it!) - // AIDA::IDataPointSet* bookDataPointSet(const std::string& name, const std::string& title="", - // const std::string& xtitle="", const std::string& ytitle=""); - - - // /// Book a 2-dimensional data point set with equally spaced points in a range. - // /// (NB. this returns a pointer rather than a reference since it will - // /// have to be stored in the analysis class - there's no point in forcing users to explicitly - // /// get the pointer from a reference before they can use it!) - // AIDA::IDataPointSet* bookDataPointSet(const std::string& name, - // size_t npts, double lower, double upper, - // const std::string& title="", - // const std::string& xtitle="", const std::string& ytitle=""); - - // /// Book a 2-dimensional data point set based on the corresponding AIDA data - // /// file. The binnings (x-errors) will be obtained by reading the bundled - // /// AIDA data record file of the same filename as the analysis' name() - // /// property. - // //AIDA::IDataPointSet* bookDataPointSet(const std::string& name, const std::string& title); - - // /// Book a 2-dimensional data point set based on the paper, dataset and x/y-axis IDs in the corresponding - // /// HepData record. 
The binnings (x-errors) will be obtained by reading the bundled AIDA data record file - // /// of the same filename as the analysis' name() property. - // AIDA::IDataPointSet* bookDataPointSet(size_t datasetId, size_t xAxisId, size_t yAxisId, - // const std::string& title="", - // const std::string& xtitle="", const std::string& ytitle=""); + /// Book a 2-dimensional data point set. + /// (NB. this returns a pointer rather than a reference since it will + /// have to be stored in the analysis class - there's no point in forcing users to explicitly + /// get the pointer from a reference before they can use it!) + Scatter2DPtr bookScatter2D(const std::string& name, const std::string& title="", + const std::string& xtitle="", const std::string& ytitle=""); + + + /// Book a 2-dimensional data point set with equally spaced points in a range. + /// (NB. this returns a pointer rather than a reference since it will + /// have to be stored in the analysis class - there's no point in forcing users to explicitly + /// get the pointer from a reference before they can use it!) + Scatter2DPtr bookScatter2D(const std::string& name, + size_t npts, double lower, double upper, + const std::string& title="", + const std::string& xtitle="", const std::string& ytitle=""); + + /// Book a 2-dimensional data point set based on the corresponding AIDA data + /// file. The binnings (x-errors) will be obtained by reading the bundled + /// AIDA data record file of the same filename as the analysis' name() + /// property. + Scatter2DPtr bookScatter2D(const std::string& name, const std::string& title); + + /// Book a 2-dimensional data point set based on the paper, dataset and x/y-axis IDs in the corresponding + /// HepData record. The binnings (x-errors) will be obtained by reading the bundled AIDA data record file + /// of the same filename as the analysis' name() property. 
+ Scatter2DPtr bookScatter2D(size_t datasetId, size_t xAxisId, size_t yAxisId, + const std::string& title="", + const std::string& xtitle="", const std::string& ytitle=""); //@} @@ -511,11 +511,11 @@ /// @name Utility functions //@{ - // /// Get the bin edges for this paper from the reference AIDA file, and cache them. - // void _cacheBinEdges() const; + /// Get the bin edges for this paper from the reference AIDA file, and cache them. + void _cacheBinEdges() const; - // /// Get the x-axis points for this paper from the reference AIDA file, and cache them. - // void _cacheXAxisData() const; + /// Get the x-axis points for this paper from the reference AIDA file, and cache them. + void _cacheXAxisData() const; //@} @@ -549,7 +549,7 @@ // /// Collection of x-axis point data to speed up many autobookings: the // /// reference data file should only be read once. // /// @todo Reduce memory occupancy, or clear after initialisation? - // mutable map<string, vector<DPSXPoint> > _dpsData; + mutable map<string, vector<DPSXPoint> > _dpsData; /// Collection of cached bin edges to speed up many autobookings: the /// reference data file should only be read once. 
Modified: branches/2011-07-aida2yoda/include/Rivet/RivetYODA.fhh ============================================================================== --- branches/2011-07-aida2yoda/include/Rivet/RivetYODA.fhh Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/include/Rivet/RivetYODA.fhh Tue Jul 19 16:22:39 2011 (r3209) @@ -15,17 +15,14 @@ class WriterYODA; class Histo1D; class Profile1D; -// class IHistogram2D; -// class IMeasurement; -// class ITreeFactory; -// class ITree; -// class IAxis; + class Scatter2D; } namespace Rivet { typedef boost::shared_ptr<YODA::AnalysisObject> AnalysisObjectPtr; typedef boost::shared_ptr<YODA::Histo1D> Histo1DPtr; typedef boost::shared_ptr<YODA::Profile1D> Profile1DPtr; + typedef boost::shared_ptr<YODA::Scatter2D> Scatter2DPtr; } #include <vector> @@ -35,26 +32,26 @@ // /// Typedef for a collection of bin edges. typedef std::vector<double> BinEdges; -// /// Container for x-axis point details -// struct DPSXPoint { -// DPSXPoint(double xval, double xerrminus, double xerrplus) : -// val(xval), errminus(xerrminus), errplus(xerrplus) { } -// double val; -// double errminus; -// double errplus; -// }; - -// /// Container for x-axis point details -// struct DPSXYPoint { -// DPSXYPoint(double xval, double xerrminus, double xerrplus, double yval, double yerrminus, double yerrplus) : -// xval(xval), xerrminus(xerrminus), xerrplus(xerrplus), yval(yval), yerrminus(yerrminus), yerrplus(yerrplus) { } -// double xval; -// double xerrminus; -// double xerrplus; -// double yval; -// double yerrminus; -// double yerrplus; -// }; + /// Container for x-axis point details + struct DPSXPoint { + DPSXPoint(double xval, double xerrminus, double xerrplus) : + val(xval), errminus(xerrminus), errplus(xerrplus) { } + double val; + double errminus; + double errplus; + }; + + /// Container for x-axis point details + struct DPSXYPoint { + DPSXYPoint(double xval, double xerrminus, double xerrplus, double yval, double yerrminus, double yerrplus) : + 
xval(xval), xerrminus(xerrminus), xerrplus(xerrplus), yval(yval), yerrminus(yerrminus), yerrplus(yerrplus) { } + double xval; + double xerrminus; + double xerrplus; + double yval; + double yerrminus; + double yerrplus; + }; } Modified: branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh ============================================================================== --- branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/include/Rivet/RivetYODA.hh Tue Jul 19 16:22:39 2011 (r3209) @@ -13,6 +13,8 @@ #include "YODA/WriterYODA.h" #include "YODA/Histo1D.h" #include "YODA/Profile1D.h" +#include "YODA/Scatter2D.h" +#include "YODA/Point2D.h" namespace Rivet { @@ -20,34 +22,29 @@ /// given @a papername. map<string, BinEdges> getBinEdges(string papername); - // map<string, BinEdges> getBinEdges(const map<string, vector<DPSXPoint> >& xpoints); + map<string, BinEdges> getBinEdges(const map<string, vector<DPSXPoint> >& xpoints); - // map<string, vector<DPSXPoint> > getDPSXValsErrs(string papername); + map<string, vector<DPSXPoint> > getDPSXValsErrs(string papername); - // map<string, vector<DPSXYPoint> > getDPSXYValsErrs(string papername); + map<string, vector<DPSXYPoint> > getDPSXYValsErrs(string papername); /// Get the file system path to the AIDA reference file for this paper. 
string getDatafilePath(string papername); - // /// Return the integral over the histogram bins - // inline double integral(Histo1DPtr histo) { - // double intg = 0.; - // for ( int i = 0; i < histo->axis().bins(); ++i ) { - // // Don't multiply with binWidth -- it's already included in binHeight - // intg += histo->binHeight(i); // * histo->axis().binWidth(i); - // } - // return intg; - // } + /// Return the integral over the histogram bins + inline double integral(Histo1DPtr histo) { + double intg = 0.; + for ( size_t i = 0; i < histo->numBins(); ++i ) { + intg += histo->bin(i).area(); + } + return intg; + } using YODA::WriterYODA; using YODA::Histo1D; using YODA::Profile1D; - // using AIDA::IHistogram2D; - // using AIDA::IDataPointSet; - // using AIDA::IDataPoint; - // using AIDA::IMeasurement; - // using AIDA::ITree; - // using AIDA::IAxis; + using YODA::Scatter2D; + using YODA::Point2D; } #endif Modified: branches/2011-07-aida2yoda/src/Analyses/MC_GENERIC.cc ============================================================================== --- branches/2011-07-aida2yoda/src/Analyses/MC_GENERIC.cc Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Analyses/MC_GENERIC.cc Tue Jul 19 16:22:39 2011 (r3209) @@ -6,7 +6,6 @@ #include "Rivet/Projections/ChargedFinalState.hh" #include "Rivet/Projections/UnstableFinalState.hh" #include "Rivet/Projections/MissingMomentum.hh" -#include "LWH/Histogram1D.h" namespace Rivet { @@ -53,10 +52,10 @@ _histEta = bookHisto1D("Eta", 50, -5, 5); _histEtaCh = bookHisto1D("EtaCh", 50, -5, 5); - _tmphistEtaPlus.reset(new LWH::Histogram1D(25, 0, 5)); - _tmphistEtaMinus.reset(new LWH::Histogram1D(25, 0, 5)); - _tmphistEtaChPlus.reset(new LWH::Histogram1D(25, 0, 5)); - _tmphistEtaChMinus.reset(new LWH::Histogram1D(25, 0, 5)); + _tmphistEtaPlus.reset(new Histo1D(25, 0, 5)); + _tmphistEtaMinus.reset(new Histo1D(25, 0, 5)); + _tmphistEtaChPlus.reset(new Histo1D(25, 0, 5)); + _tmphistEtaChMinus.reset(new Histo1D(25, 0, 5)); 
_histEtaPi = bookHisto1D("EtaPi", 25, 0, 5); _histEtaK = bookHisto1D("EtaK", 25, 0, 5); @@ -65,10 +64,10 @@ _histRapidity = bookHisto1D("Rapidity", 50, -5, 5); _histRapidityCh = bookHisto1D("RapidityCh", 50, -5, 5); - _tmphistRapPlus.reset(new LWH::Histogram1D(25, 0, 5)); - _tmphistRapMinus.reset(new LWH::Histogram1D(25, 0, 5)); - _tmphistRapChPlus.reset(new LWH::Histogram1D(25, 0, 5)); - _tmphistRapChMinus.reset(new LWH::Histogram1D(25, 0, 5)); + _tmphistRapPlus.reset(new Histo1D(25, 0, 5)); + _tmphistRapMinus.reset(new Histo1D(25, 0, 5)); + _tmphistRapChPlus.reset(new Histo1D(25, 0, 5)); + _tmphistRapChMinus.reset(new Histo1D(25, 0, 5)); _histPhi = bookHisto1D("Phi", 50, 0, TWOPI); _histPhiCh = bookHisto1D("PhiCh", 50, 0, TWOPI); @@ -180,10 +179,11 @@ scale(_histPhi, 1/sumOfWeights()); scale(_histPhiCh, 1/sumOfWeights()); - histogramFactory().divide(histoPath("EtaPMRatio"), *_tmphistEtaPlus, *_tmphistEtaMinus); - histogramFactory().divide(histoPath("EtaChPMRatio"), *_tmphistEtaChPlus, *_tmphistEtaChMinus); - histogramFactory().divide(histoPath("RapidityPMRatio"), *_tmphistRapPlus, *_tmphistRapMinus); - histogramFactory().divide(histoPath("RapidityChPMRatio"), *_tmphistRapChPlus, *_tmphistRapChMinus); + // \todo YODA divide + // histogramFactory().divide(histoPath("EtaPMRatio"), *_tmphistEtaPlus, *_tmphistEtaMinus); + // histogramFactory().divide(histoPath("EtaChPMRatio"), *_tmphistEtaChPlus, *_tmphistEtaChMinus); + // histogramFactory().divide(histoPath("RapidityPMRatio"), *_tmphistRapPlus, *_tmphistRapMinus); + // histogramFactory().divide(histoPath("RapidityChPMRatio"), *_tmphistRapChPlus, *_tmphistRapChMinus); } //@} @@ -192,17 +192,17 @@ private: /// Temporary histos used to calculate eta+/eta- ratio plot - shared_ptr<LWH::Histogram1D> _tmphistEtaPlus, _tmphistEtaMinus; - shared_ptr<LWH::Histogram1D> _tmphistEtaChPlus, _tmphistEtaChMinus; - shared_ptr<LWH::Histogram1D> _tmphistRapPlus, _tmphistRapMinus; - shared_ptr<LWH::Histogram1D> _tmphistRapChPlus, 
_tmphistRapChMinus; + Histo1DPtr _tmphistEtaPlus, _tmphistEtaMinus; + Histo1DPtr _tmphistEtaChPlus, _tmphistEtaChMinus; + Histo1DPtr _tmphistRapPlus, _tmphistRapMinus; + Histo1DPtr _tmphistRapChPlus, _tmphistRapChMinus; //@{ /// Histograms Histo1DPtr _histMult, _histMultCh; Histo1DPtr _histStablePIDs, _histDecayedPIDs, _histAllPIDs; Histo1DPtr _histEtaPi, _histEtaK, _histEtaLambda; - AIDA::IProfile1D _histEtaSumEt; + Profile1DPtr _histEtaSumEt; Histo1DPtr _histEta, _histEtaCh; Histo1DPtr _histRapidity, _histRapidityCh; Histo1DPtr _histPt, _histPtCh; Modified: branches/2011-07-aida2yoda/src/Analyses/MC_JetAnalysis.cc ============================================================================== --- branches/2011-07-aida2yoda/src/Analyses/MC_JetAnalysis.cc Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Analyses/MC_JetAnalysis.cc Tue Jul 19 16:22:39 2011 (r3209) @@ -12,10 +12,10 @@ const string& jetpro_name, double jetptcut) : Analysis(name), m_njet(njet), m_jetpro_name(jetpro_name), m_jetptcut(jetptcut), - _h_log10_d(njet, NULL), _h_log10_R(njet+1, NULL), _h_pT_jet(njet, NULL), - _h_eta_jet(njet, NULL), _h_eta_jet_plus(njet), _h_eta_jet_minus(njet), - _h_rap_jet(njet, NULL), _h_rap_jet_plus(njet), _h_rap_jet_minus(njet), - _h_mass_jet(njet, NULL) + _h_log10_d(njet), _h_log10_R(njet+1), _h_pT_jet(njet), + _h_eta_jet(njet), _h_eta_jet_plus(njet), _h_eta_jet_minus(njet), + _h_rap_jet(njet), _h_rap_jet_plus(njet), _h_rap_jet_minus(njet), + _h_mass_jet(njet) { setNeedsCrossSection(true); } @@ -33,7 +33,7 @@ stringstream Rname; Rname << "log10_R_" << i; - _h_log10_R[i] = bookDataPointSet(Rname.str(), 50, 0.2, log10(0.5*sqrtS())); + _h_log10_R[i] = bookScatter2D(Rname.str(), 50, 0.2, log10(0.5*sqrtS())); stringstream pTname; pTname << "jet_pT_" << i+1; @@ -50,14 +50,14 @@ stringstream etaname; etaname << "jet_eta_" << i+1; _h_eta_jet[i] = bookHisto1D(etaname.str(), i > 1 ? 25 : 50, -5.0, 5.0); - _h_eta_jet_plus[i].reset(new LWH::Histogram1D(i > 1 ? 
15 : 25, 0, 5)); - _h_eta_jet_minus[i].reset(new LWH::Histogram1D(i > 1 ? 15 : 25, 0, 5)); + _h_eta_jet_plus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5)); + _h_eta_jet_minus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5)); stringstream rapname; rapname << "jet_y_" << i+1; _h_rap_jet[i] = bookHisto1D(rapname.str(), i>1 ? 25 : 50, -5.0, 5.0); - _h_rap_jet_plus[i].reset(new LWH::Histogram1D(i > 1 ? 15 : 25, 0, 5)); - _h_rap_jet_minus[i].reset(new LWH::Histogram1D(i > 1 ? 15 : 25, 0, 5)); + _h_rap_jet_plus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5)); + _h_rap_jet_minus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5)); for (size_t j = i+1; j < m_njet; ++j) { std::pair<size_t, size_t> ij = std::make_pair(i, j); @@ -73,11 +73,11 @@ } stringstream Rname; Rname << "log10_R_" << m_njet; - _h_log10_R[m_njet] = bookDataPointSet(Rname.str(), 50, 0.2, log10(0.5*sqrtS())); + _h_log10_R[m_njet] = bookScatter2D(Rname.str(), 50, 0.2, log10(0.5*sqrtS())); _h_jet_multi_exclusive = bookHisto1D("jet_multi_exclusive", m_njet+3, -0.5, m_njet+3-0.5); _h_jet_multi_inclusive = bookHisto1D("jet_multi_inclusive", m_njet+3, -0.5, m_njet+3-0.5); - _h_jet_multi_ratio = bookDataPointSet("jet_multi_ratio", m_njet+2, 0.5, m_njet+3-0.5); + _h_jet_multi_ratio = bookScatter2D("jet_multi_ratio", m_njet+2, 0.5, m_njet+3-0.5); _h_jet_HT = bookHisto1D("jet_HT", logBinEdges(50, m_jetptcut, sqrtS()/GeV/2.0)); } @@ -101,21 +101,21 @@ _h_log10_d[i]->fill(d_ij, weight); // Fill integrated jet resolution - for (int ibin = 0; ibin < _h_log10_R[i]->size(); ++ibin) { - IDataPoint* dp = _h_log10_R[i]->point(ibin); - double dcut = dp->coordinate(0)->value(); + for (size_t ibin = 0; ibin < _h_log10_R[i]->numPoints(); ++ibin) { + Point2D & dp = _h_log10_R[i]->point(ibin); + double dcut = dp.x(); if (d_ij < dcut && previous_dij > dcut) { - dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight); + dp.setY(dp.y() + weight); } } previous_dij = d_ij; } // One remaining integrated jet resolution - for (int ibin = 0; 
ibin<_h_log10_R[m_njet]->size(); ++ibin) { - IDataPoint* dp = _h_log10_R[m_njet]->point(ibin); - double dcut = dp->coordinate(0)->value(); + for (size_t ibin = 0; ibin<_h_log10_R[m_njet]->numPoints(); ++ibin) { + Point2D & dp = _h_log10_R[m_njet]->point(ibin); + double dcut = dp.x(); if (previous_dij > dcut) { - dp->coordinate(1)->setValue(dp->coordinate(1)->value() + weight); + dp.setY(dp.y() + weight); } } } @@ -187,9 +187,9 @@ void MC_JetAnalysis::finalize() { for (size_t i = 0; i < m_njet; ++i) { scale(_h_log10_d[i], crossSection()/sumOfWeights()); - for (int ibin = 0; ibin<_h_log10_R[i]->size(); ++ibin) { - IDataPoint* dp = _h_log10_R[i]->point(ibin); - dp->coordinate(1)->setValue(dp->coordinate(1)->value()*crossSection()/sumOfWeights()); + for (size_t ibin = 0; ibin<_h_log10_R[i]->numPoints(); ++ibin) { + Point2D & dp = _h_log10_R[i]->point(ibin); + dp.setY(dp.y()*crossSection()/sumOfWeights()); } scale(_h_pT_jet[i], crossSection()/sumOfWeights()); @@ -200,15 +200,16 @@ // Create eta/rapidity ratio plots stringstream etaname; etaname << "jet_eta_pmratio_" << i+1; - histogramFactory().divide(histoPath(etaname.str()), *_h_eta_jet_plus[i], *_h_eta_jet_minus[i]); + // \todo YODA divide + // histogramFactory().divide(histoPath(etaname.str()), *_h_eta_jet_plus[i], *_h_eta_jet_minus[i]); stringstream rapname; rapname << "jet_y_pmratio_" << i+1; - histogramFactory().divide(histoPath(rapname.str()), *_h_rap_jet_plus[i], *_h_rap_jet_minus[i]); + // histogramFactory().divide(histoPath(rapname.str()), *_h_rap_jet_plus[i], *_h_rap_jet_minus[i]); } - for (int ibin = 0; ibin < _h_log10_R[m_njet]->size(); ++ibin) { - IDataPoint* dp =_h_log10_R[m_njet]->point(ibin); - dp->coordinate(1)->setValue(dp->coordinate(1)->value()*crossSection()/sumOfWeights()); + for (size_t ibin = 0; ibin < _h_log10_R[m_njet]->numPoints(); ++ibin) { + Point2D & dp =_h_log10_R[m_njet]->point(ibin); + dp.setY(dp.y()*crossSection()/sumOfWeights()); } // Scale the d{eta,R} histograms @@ -221,18 +222,19 
@@ } // Fill inclusive jet multi ratio - int Nbins = _h_jet_multi_inclusive->axis().bins(); - std::vector<double> ratio(Nbins-1, 0.0); - std::vector<double> err(Nbins-1, 0.0); + int Nbins = _h_jet_multi_inclusive->numBins(); for (int i = 0; i < Nbins-1; ++i) { - if (_h_jet_multi_inclusive->binHeight(i) > 0.0 && _h_jet_multi_inclusive->binHeight(i+1) > 0.0) { - ratio[i] = _h_jet_multi_inclusive->binHeight(i+1)/_h_jet_multi_inclusive->binHeight(i); - double relerr_i = _h_jet_multi_inclusive->binError(i)/_h_jet_multi_inclusive->binHeight(i); - double relerr_j = _h_jet_multi_inclusive->binError(i+1)/_h_jet_multi_inclusive->binHeight(i+1); - err[i] = ratio[i] * (relerr_i + relerr_j); + if (_h_jet_multi_inclusive->bin(i).area() > 0.0 && _h_jet_multi_inclusive->bin(i+1).area() > 0.0) { + double ratio = _h_jet_multi_inclusive->bin(i+1).area()/_h_jet_multi_inclusive->bin(i).area(); + double relerr_i = _h_jet_multi_inclusive->bin(i).areaError()/_h_jet_multi_inclusive->bin(i).area(); + double relerr_j = _h_jet_multi_inclusive->bin(i+1).areaError()/_h_jet_multi_inclusive->bin(i+1).area(); + double err = ratio * (relerr_i + relerr_j); + + Point2D & pt = _h_jet_multi_ratio->point(i); + pt.setY(ratio); + pt.setYErr(err); } } - _h_jet_multi_ratio->setCoordinate(1, ratio, err); scale(_h_jet_multi_exclusive, crossSection()/sumOfWeights()); scale(_h_jet_multi_inclusive, crossSection()/sumOfWeights()); Modified: branches/2011-07-aida2yoda/src/Analyses/MC_WJETS.cc ============================================================================== --- branches/2011-07-aida2yoda/src/Analyses/MC_WJETS.cc Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Analyses/MC_WJETS.cc Tue Jul 19 16:22:39 2011 (r3209) @@ -114,18 +114,19 @@ scale(_h_lepton_eta, crossSection()/sumOfWeights()); // Construct asymmetry: (dsig+/deta - dsig-/deta) / (dsig+/deta + dsig-/deta) for each Et region - AIDA::IHistogramFactory& hf = histogramFactory(); - IHistogram1D* numtmp = hf.subtract("/numtmp", 
*_htmp_dsigplus_deta, *_htmp_dsigminus_deta); - IHistogram1D* dentmp = hf.add("/dentmp", *_htmp_dsigplus_deta, *_htmp_dsigminus_deta); - assert(numtmp && dentmp); - hf.divide(histoDir() + "/W_chargeasymm_eta", *numtmp, *dentmp); - hf.destroy(numtmp); - hf.destroy(dentmp); - hf.destroy(_htmp_dsigminus_deta); - hf.destroy(_htmp_dsigplus_deta); + // \todo YODA + // AIDA::IHistogramFactory& hf = histogramFactory(); + // IHistogram1D* numtmp = hf.subtract("/numtmp", *_htmp_dsigplus_deta, *_htmp_dsigminus_deta); + // IHistogram1D* dentmp = hf.add("/dentmp", *_htmp_dsigplus_deta, *_htmp_dsigminus_deta); + // assert(numtmp && dentmp); + // hf.divide(histoDir() + "/W_chargeasymm_eta", *numtmp, *dentmp); + // hf.destroy(numtmp); + // hf.destroy(dentmp); + // hf.destroy(_htmp_dsigminus_deta); + // hf.destroy(_htmp_dsigplus_deta); - // W charge asymmetry vs. pTW: dsig+/dpT / dsig-/dpT - hf.divide(histoDir() + "/W_chargeasymm_pT", *_h_Wplus_pT, *_h_Wminus_pT); + // // W charge asymmetry vs. pTW: dsig+/dpT / dsig-/dpT + // hf.divide(histoDir() + "/W_chargeasymm_pT", *_h_Wplus_pT, *_h_Wminus_pT); scale(_h_Wplus_pT, crossSection()/sumOfWeights()); scale(_h_Wminus_pT, crossSection()/sumOfWeights()); Modified: branches/2011-07-aida2yoda/src/Analyses/MC_WPOL.cc ============================================================================== --- branches/2011-07-aida2yoda/src/Analyses/MC_WPOL.cc Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Analyses/MC_WPOL.cc Tue Jul 19 16:22:39 2011 (r3209) @@ -46,7 +46,7 @@ _h_dists.resize(tags.size()); _h_histos.resize(tags.size()); for (size_t i=0; i<tags.size(); ++i) { - _h_dists[i].resize(11,NULL); + _h_dists[i].resize(11,Profile1DPtr()); _h_dists[i][0] = bookProfile1D("A0"+tags[i],logBinEdges(100, 1.0, 0.5*sqrtS())); _h_dists[i][1] = bookProfile1D("A1"+tags[i],logBinEdges(100, 1.0, 0.5*sqrtS())); _h_dists[i][2] = bookProfile1D("A2"+tags[i],logBinEdges(100, 1.0, 0.5*sqrtS())); @@ -58,7 +58,7 @@ _h_dists[i][8] = 
bookProfile1D("fL"+tags[i],logBinEdges(100, 1.0, 0.5*sqrtS())); _h_dists[i][9] = bookProfile1D("fR"+tags[i],logBinEdges(100, 1.0, 0.5*sqrtS())); _h_dists[i][10] = bookProfile1D("f0"+tags[i],logBinEdges(100, 1.0, 0.5*sqrtS())); - _h_histos[i].resize(4,NULL); + _h_histos[i].resize(4,Histo1DPtr()); _h_histos[i][0] = bookHisto1D("thetastar"+tags[i],100,-1.0,1.0); _h_histos[i][1] = bookHisto1D("phistar"+tags[i],90,0.0,360.0); _h_histos[i][2] = bookHisto1D("thetastar_ptw20"+tags[i],100,-1.0,1.0); Modified: branches/2011-07-aida2yoda/src/Analyses/Makefile.am ============================================================================== --- branches/2011-07-aida2yoda/src/Analyses/Makefile.am Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Analyses/Makefile.am Tue Jul 19 16:22:39 2011 (r3209) @@ -3,9 +3,9 @@ LIBS = $(FASTJETCONFIGLIBADD) lib_LTLIBRARIES = -#noinst_LTLIBRARIES = libRivetAnalysisTools.la -#libRivetAnalysisTools_la_SOURCES = \ -# MC_JetAnalysis.cc +noinst_LTLIBRARIES = libRivetAnalysisTools.la +libRivetAnalysisTools_la_SOURCES = \ + MC_JetAnalysis.cc ## ANALYSIS CATEGORIES ## @@ -62,17 +62,17 @@ # endif -# lib_LTLIBRARIES += RivetCMSAnalyses.la -# RivetCMSAnalyses_la_SOURCES = \ -# CMS_2011_S8957746.cc +lib_LTLIBRARIES += RivetCMSAnalyses.la +RivetCMSAnalyses_la_SOURCES = \ + CMS_2011_S8957746.cc -# lib_LTLIBRARIES += RivetLHCbAnalyses.la -# RivetLHCbAnalyses_la_SOURCES = -# if ENABLE_UNVALIDATED -# RivetLHCbAnalyses_la_SOURCES += \ -# LHCB_2010_S8758301.cc -# endif +lib_LTLIBRARIES += RivetLHCbAnalyses.la +RivetLHCbAnalyses_la_SOURCES = +if ENABLE_UNVALIDATED +RivetLHCbAnalyses_la_SOURCES += \ + LHCB_2010_S8758301.cc +endif # lib_LTLIBRARIES += RivetCDFAnalyses.la @@ -160,10 +160,10 @@ # ZEUS_2001_S4815815.cc # endif -# lib_LTLIBRARIES += RivetPetraAnalyses.la -# RivetPetraAnalyses_la_SOURCES = \ -# JADE_1998_S3612880.cc \ -# TASSO_1990_S2148048.cc +lib_LTLIBRARIES += RivetPetraAnalyses.la +RivetPetraAnalyses_la_SOURCES = \ + 
JADE_1998_S3612880.cc \ + TASSO_1990_S2148048.cc # lib_LTLIBRARIES += RivetLEPAnalyses.la # RivetLEPAnalyses_la_SOURCES = \ @@ -228,19 +228,19 @@ lib_LTLIBRARIES += RivetMCAnalyses.la RivetMCAnalyses_la_SOURCES = \ - ExampleAnalysis.cc -# MC_GENERIC.cc \ -# MC_JETS.cc \ -# MC_PHOTONJETS.cc \ -# MC_HJETS.cc \ -# MC_WJETS.cc \ -# MC_WPOL.cc \ -# MC_WWJETS.cc \ -# MC_ZJETS.cc \ -# MC_ZZJETS.cc \ -# MC_LEADINGJETS.cc \ -# MC_DIPHOTON.cc \ -# MC_SUSY.cc + ExampleAnalysis.cc \ + MC_GENERIC.cc \ + MC_JETS.cc \ + MC_PHOTONJETS.cc \ + MC_HJETS.cc \ + MC_WJETS.cc \ + MC_WPOL.cc \ + MC_WWJETS.cc \ + MC_ZJETS.cc \ + MC_ZZJETS.cc \ + MC_LEADINGJETS.cc \ + MC_DIPHOTON.cc \ + MC_SUSY.cc # if ENABLE_UNVALIDATED # RivetMCAnalyses_la_SOURCES += \ # MC_DIJET.cc \ Modified: branches/2011-07-aida2yoda/src/Core/Analysis.cc ============================================================================== --- branches/2011-07-aida2yoda/src/Core/Analysis.cc Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Core/Analysis.cc Tue Jul 19 16:22:39 2011 (r3209) @@ -162,21 +162,21 @@ // Histogramming - // void Analysis::_cacheBinEdges() const { - // _cacheXAxisData(); - // if (_histBinEdges.empty()) { - // MSG_TRACE("Getting histo bin edges from AIDA for paper " << name()); - // _histBinEdges = getBinEdges(_dpsData); - // } - // } + void Analysis::_cacheBinEdges() const { + _cacheXAxisData(); + if (_histBinEdges.empty()) { + MSG_TRACE("Getting histo bin edges from AIDA for paper " << name()); + _histBinEdges = getBinEdges(_dpsData); + } + } - // void Analysis::_cacheXAxisData() const { - // if (_dpsData.empty()) { - // MSG_TRACE("Getting DPS x-axis data from AIDA for paper " << name()); - // _dpsData = getDPSXValsErrs(name()); - // } - // } + void Analysis::_cacheXAxisData() const { + if (_dpsData.empty()) { + MSG_TRACE("Getting DPS x-axis data from AIDA for paper " << name()); + _dpsData = getDPSXValsErrs(name()); + } + } const BinEdges& Analysis::binEdges(const string& hname) 
const { @@ -360,57 +360,59 @@ - // IDataPointSet* Analysis::bookDataPointSet(const string& hname, const string& title, - // const string& xtitle, const string& ytitle) { - // _makeHistoDir(); - // const string path = histoPath(hname); - // IDataPointSet* dps = datapointsetFactory().create(path, title, 2); - // MSG_TRACE("Made data point set " << hname << " for " << name()); - // dps->setXTitle(xtitle); - // dps->setYTitle(ytitle); - // return dps; - // } + Scatter2DPtr Analysis::bookScatter2D(const string& hname, const string& title, + const string& xtitle, const string& ytitle) { + const string path = histoPath(hname); + Scatter2DPtr dps( new Scatter2D(path, title) ); + addPlot(dps); + MSG_TRACE("Made data point set " << hname << " for " << name()); + // dps->setXTitle(xtitle); + // dps->setYTitle(ytitle); + return dps; + } - // IDataPointSet* Analysis::bookDataPointSet(const string& hname, - // size_t npts, double lower, double upper, - // const string& title, - // const string& xtitle, const string& ytitle) { - // IDataPointSet* dps = bookDataPointSet(hname, title, xtitle, ytitle); - // for (size_t pt = 0; pt < npts; ++pt) { - // const double binwidth = (upper-lower)/npts; - // const double bincentre = lower + (pt + 0.5) * binwidth; - // dps->addPoint(); - // IMeasurement* meas = dps->point(pt)->coordinate(0); - // meas->setValue(bincentre); - // meas->setErrorPlus(binwidth/2.0); - // meas->setErrorMinus(binwidth/2.0); - // } - // return dps; - // } + Scatter2DPtr Analysis::bookScatter2D(const string& hname, + size_t npts, double lower, double upper, + const string& title, + const string& xtitle, const string& ytitle) { + Scatter2DPtr dps = bookScatter2D(hname, title, xtitle, ytitle); + const double binwidth = (upper-lower)/npts; + for (size_t pt = 0; pt < npts; ++pt) { + const double bincentre = lower + (pt + 0.5) * binwidth; + // \todo YODA check + dps->addPoint(bincentre, 0, binwidth/2.0, 0); + // IMeasurement* meas = dps->point(pt)->coordinate(0); + // 
meas->setValue(bincentre); + // meas->setErrorPlus(binwidth/2.0); + // meas->setErrorMinus(binwidth/2.0); + } + return dps; + } - // IDataPointSet* Analysis::bookDataPointSet(size_t datasetId, size_t xAxisId, - // size_t yAxisId, const string& title, - // const string& xtitle, const string& ytitle) { - // // Get the bin edges (only read the AIDA file once) - // _cacheXAxisData(); - // // Build the axis code - // const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); - // //const map<string, vector<DPSXPoint> > xpoints = getDPSXValsErrs(papername); - // MSG_TRACE("Using DPS x-positions for " << name() << ":" << axisCode); - // IDataPointSet* dps = bookDataPointSet(axisCode, title, xtitle, ytitle); - // const vector<DPSXPoint> xpts = _dpsData.find(axisCode)->second; - // for (size_t pt = 0; pt < xpts.size(); ++pt) { - // dps->addPoint(); - // IMeasurement* meas = dps->point(pt)->coordinate(0); - // meas->setValue(xpts[pt].val); - // meas->setErrorPlus(xpts[pt].errplus); - // meas->setErrorMinus(xpts[pt].errminus); - // } - // MSG_TRACE("Made DPS " << axisCode << " for " << name()); - // return dps; - // } + Scatter2DPtr Analysis::bookScatter2D(size_t datasetId, size_t xAxisId, + size_t yAxisId, const string& title, + const string& xtitle, const string& ytitle) { + // Get the bin edges (only read the AIDA file once) + // _cacheXAxisData(); + // Build the axis code + const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); + //const map<string, vector<DPSXPoint> > xpoints = getDPSXValsErrs(papername); + MSG_TRACE("Using DPS x-positions for " << name() << ":" << axisCode); + Scatter2DPtr dps = bookScatter2D(axisCode, title, xtitle, ytitle); + const vector<DPSXPoint> xpts = _dpsData.find(axisCode)->second; + for (size_t pt = 0; pt < xpts.size(); ++pt) { + // \todo YODA check + dps->addPoint(xpts[pt].val, xpts[pt].errminus, xpts[pt].errplus, 0, 0, 0); + // IMeasurement* meas = dps->point(pt)->coordinate(0); + // meas->setValue(xpts[pt].val); + // 
meas->setErrorPlus(xpts[pt].errplus); + // meas->setErrorMinus(xpts[pt].errminus); + } + MSG_TRACE("Made DPS " << axisCode << " for " << name()); + return dps; + } void Analysis::normalize(Histo1DPtr histo, double norm) { @@ -469,7 +471,9 @@ // tree().mkdir("/tmpnormalize"); // tree().mv(hpath, "/tmpnormalize"); - // AIDA::IDataPointSet* dps = datapointsetFactory().createXY(hpath, title, x, y, ex, ey); + Scatter2DPtr dps( new Scatter2D(x, y, ex, ey, hpath, title) ); + addPlot(dps); + // dps->setXTitle(xtitle); // dps->setYTitle(ytitle); @@ -547,7 +551,7 @@ // tree().mkdir("/tmpnormalize"); // tree().mv(hpath, "/tmpnormalize"); - // AIDA::IDataPointSet* dps = + // Scatter2DPtr dps = // datapointsetFactory().createXYZ(hpath, title, x, y, z, ex, ey, ez); // dps->setXTitle(xtitle); // dps->setYTitle(ytitle); Modified: branches/2011-07-aida2yoda/src/Makefile.am ============================================================================== --- branches/2011-07-aida2yoda/src/Makefile.am Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Makefile.am Tue Jul 19 16:22:39 2011 (r3209) @@ -18,7 +18,7 @@ Core/libRivetCore.la \ Projections/libRivetProjections.la \ Tools/libRivetTools.la \ - -ldl -lm -lYODA -lHepMC \ + Analyses/libRivetAnalysisTools.la \ + -ldl -lm -lYODA -lHepMC \ $(GSL_LDFLAGS) $(FASTJETCONFIGLIBADD) -## Analyses/libRivetAnalysisTools.la \ Modified: branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc ============================================================================== --- branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc Tue Jul 19 15:03:30 2011 (r3208) +++ branches/2011-07-aida2yoda/src/Tools/RivetYODA.cc Tue Jul 19 16:22:39 2011 (r3209) @@ -3,21 +3,12 @@ // #include "Rivet/RivetBoost.hh" // #include "Rivet/Tools/Utils.hh" #include "Rivet/Tools/RivetPaths.hh" -// #include "LWH/AnalysisFactory.h" -// #include "TinyXML/tinyxml.h" // #include <sstream> // #include <stdexcept> using namespace std; namespace Rivet { - - // /// Get an AIDA 
system (LWH impl.) - // AIDA::IAnalysisFactory* createAnalysisFactory() { - // return new LWH::AnalysisFactory(); - // } - - string getDatafilePath(string papername) { const string path = findAnalysisRefFile(papername + ".dat"); if (!path.empty()) return path; @@ -27,9 +18,9 @@ } - // map<string, vector<DPSXYPoint> > getDPSXYValsErrs(string papername) { - // // Get filename - // const string xmlfile = getDatafilePath(papername); + map<string, vector<DPSXYPoint> > getDPSXYValsErrs(string papername) { + // Get filename + const string xmlfile = getDatafilePath(papername); // // Open AIDA XML file // TiXmlDocument doc(xmlfile); @@ -41,9 +32,9 @@ // throw Error(err); // } - // // Return value, to be populated - // map<string, vector<DPSXYPoint> > rtn; - + // Return value, to be populated + map<string, vector<DPSXYPoint> > rtn; + // \todo YODA // try { // // Walk down tree to get to the <paper> element // const TiXmlNode* aidaN = doc.FirstChild("aida"); @@ -105,13 +96,13 @@ // throw; // } - // // Return - // return rtn; - // } - - // map<string, vector<DPSXPoint> > getDPSXValsErrs(string papername) { - // // Get filename - // const string xmlfile = getDatafilePath(papername); + // Return + return rtn; + } + + map<string, vector<DPSXPoint> > getDPSXValsErrs(string papername) { + // Get filename + const string xmlfile = getDatafilePath(papername); // // Open AIDA XML file // TiXmlDocument doc(xmlfile); @@ -123,8 +114,8 @@ // throw Error(err); // } - // // Return value, to be populated - // map<string, vector<DPSXPoint> > rtn; + // Return value, to be populated + map<string, vector<DPSXPoint> > rtn; // try { // // Walk down tree to get to the <paper> element @@ -178,57 +169,57 @@ // throw; // } - // // Return - // return rtn; - // } - - - - // map<string, BinEdges> getBinEdges(string papername) { - // const map<string, vector<DPSXPoint> > xpoints = getDPSXValsErrs(papername); - // return getBinEdges(xpoints); - // } + // Return + return rtn; + } - // map<string, BinEdges> 
getBinEdges(const map<string, vector<DPSXPoint> >& xpoints) { + map<string, BinEdges> getBinEdges(string papername) { + const map<string, vector<DPSXPoint> > xpoints = getDPSXValsErrs(papername); + return getBinEdges(xpoints); + } - // map<string, BinEdges> rtn; - // for (map<string, vector<DPSXPoint> >::const_iterator dsit = xpoints.begin(); dsit != xpoints.end(); ++dsit) { - // const string plotname = dsit->first; - // list<double> edges; - // foreach (const DPSXPoint& xpt, dsit->second) { - // const double lowedge = xpt.val - xpt.errminus; - // const double highedge = xpt.val + xpt.errplus; - // edges.push_back(lowedge); - // edges.push_back(highedge); - // } - // //cout << "*** " << edges << endl; - // // Remove duplicates (the careful testing is why we haven't used a set) - // //cout << edges.size() << " edges -> " << edges.size()/2 << " bins" << endl; - // for (list<double>::iterator e = edges.begin(); e != edges.end(); ++e) { - // list<double>::iterator e2 = e; - // while (e2 != edges.end()) { - // if (e != e2) { - // if (fuzzyEquals(*e, *e2)) { - // edges.erase(e2++); - // } - // } - // ++e2; - // } - // } - // //cout << edges.size() << " edges after dups removal (should be #bins+1)" << endl; - // //cout << "@@@ " << edges << endl; + map<string, BinEdges> getBinEdges(const map<string, vector<DPSXPoint> >& xpoints) { - // // Add to the map - // rtn[plotname] = BinEdges(edges.begin(), edges.end()); - // } + map<string, BinEdges> rtn; + for (map<string, vector<DPSXPoint> >::const_iterator dsit = xpoints.begin(); dsit != xpoints.end(); ++dsit) { + const string plotname = dsit->first; + list<double> edges; + foreach (const DPSXPoint& xpt, dsit->second) { + const double lowedge = xpt.val - xpt.errminus; + const double highedge = xpt.val + xpt.errplus; + edges.push_back(lowedge); + edges.push_back(highedge); + } + + //cout << "*** " << edges << endl; + + // Remove duplicates (the careful testing is why we haven't used a set) + //cout << edges.size() << " edges -> 
" << edges.size()/2 << " bins" << endl; + for (list<double>::iterator e = edges.begin(); e != edges.end(); ++e) { + list<double>::iterator e2 = e; + while (e2 != edges.end()) { + if (e != e2) { + if (fuzzyEquals(*e, *e2)) { + edges.erase(e2++); + } + } + ++e2; + } + } + //cout << edges.size() << " edges after dups removal (should be #bins+1)" << endl; + //cout << "@@@ " << edges << endl; + + // Add to the map + rtn[plotname] = BinEdges(edges.begin(), edges.end()); + } - // // Return - // return rtn; - // } + // Return + return rtn; + } }
More information about the Rivet-svn mailing list |