|
[Rivet-svn] r2133 - in trunk: . include/Rivet include/Rivet/Projections include/Rivet/Tools src/Analyses src/Core
From: blackhole at projects.hepforge.org
Date: Fri Dec 4 14:14:36 GMT 2009
Author: buckley Date: Fri Dec 4 14:14:36 2009 New Revision: 2133 Log: Providing 'make pyclean', improvements to OPAL_2004 (just M_H/L waiting now), improvements/additions to Hemispheres interface, and warning at runtime if unvalidated analyses are being used. Modified: trunk/ChangeLog trunk/Makefile.am trunk/include/Rivet/AnalysisHandler.hh trunk/include/Rivet/Projections/Hemispheres.hh trunk/include/Rivet/Tools/Logging.hh trunk/src/Analyses/DELPHI_1996_S3430090.cc trunk/src/Analyses/OPAL_2004_S6132243.cc trunk/src/Core/AnalysisHandler.cc trunk/src/Core/Run.cc Modified: trunk/ChangeLog ============================================================================== --- trunk/ChangeLog Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/ChangeLog Fri Dec 4 14:14:36 2009 (r2133) @@ -1,5 +1,16 @@ 2009-12-04 Andy Buckley <andy at insectnation.org> + * Improved Hemispheres interface to remove unnecessary consts on + returned doubles, and to also return non-squared versions + of (scaled) hemisphere masses. + + * Add "make pyclean" make target at the top level to make it + easier for developers to clean their Python module build when the + API is extended. + + * Identify use of unvalidated analyses with a warning message at + runtime. + * Providing Analysis::sqrtS() and Analysis::beams(), and making sure they're available by the time the init methods are called. Modified: trunk/Makefile.am ============================================================================== --- trunk/Makefile.am Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/Makefile.am Fri Dec 4 14:14:36 2009 (r2133) @@ -6,7 +6,7 @@ doc: cd doc && $(MAKE) doc -.PHONY : doc dox +.PHONY : doc dox pyclean clean-local: @rm -rf a.out @@ -24,6 +24,9 @@ test ! 
-d $(DESTDIR)$(pkgdatadir) || rmdir --ignore-fail-on-non-empty $(DESTDIR)$(pkgdatadir) endif +pyclean: + cd pyext && $(MAKE) clean + ## Remove SVN dirs dist-hook: @rm -rf `find $(distdir) -name ".svn"` Modified: trunk/include/Rivet/AnalysisHandler.hh ============================================================================== --- trunk/include/Rivet/AnalysisHandler.hh Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/include/Rivet/AnalysisHandler.hh Fri Dec 4 14:14:36 2009 (r2133) @@ -125,11 +125,16 @@ //@} - /// @name handle analyses + /// @name Handle analyses //@{ /// Get a list of the currently registered analyses' names. - std::vector<std::string> analysisNames(); + std::vector<std::string> analysisNames() const; + + /// Get a list of the currently registered analyses' names. + const std::set<Analysis*>& analyses() const { + return _analyses; + } /// Add an analysis to the run list using its name. The actual Analysis /// to be used will be obtained via AnalysisHandler::getAnalysis(string). Modified: trunk/include/Rivet/Projections/Hemispheres.hh ============================================================================== --- trunk/include/Rivet/Projections/Hemispheres.hh Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/include/Rivet/Projections/Hemispheres.hh Fri Dec 4 14:14:36 2009 (r2133) @@ -92,39 +92,53 @@ /// @name Hemisphere masses (scaled by \f$ 1 / E^2_\mathrm{vis} \f$). 
///@{ - const double E2vis() const { return _E2vis; } - const double M2high() const { return _M2high; } - const double M2low() const { return _M2low; } - const double M2diff() const { return _M2high -_M2low; } - const double scaledM2high() const { + + double E2vis() const { return _E2vis; } + double Evis() const { return sqrt(_E2vis); } + + double M2high() const { return _M2high; } + double Mhigh() const { return sqrt(M2high()); } + + double M2low() const { return _M2low; } + double Mlow() const { return sqrt(M2low()); } + + double M2diff() const { return _M2high -_M2low; } + double Mdiff() const { return sqrt(M2diff()); } + + double scaledM2high() const { if (_M2high == 0.0) return 0.0; if (_E2vis != 0.0) return _M2high/_E2vis; else return std::numeric_limits<double>::max(); } - const double scaledM2low() const { + double scaledMhigh() const { return sqrt(scaledM2high()); } + + double scaledM2low() const { if (_M2low == 0.0) return 0.0; if (_E2vis != 0.0) return _M2low/_E2vis; else return std::numeric_limits<double>::max(); } - const double scaledM2diff() const { + double scaledMlow() const { return sqrt(scaledM2low()); } + + double scaledM2diff() const { if (M2diff() == 0.0) return 0.0; if (_E2vis != 0.0) return M2diff()/_E2vis; else return std::numeric_limits<double>::max(); } + double scaledMdiff() const { return sqrt(scaledM2diff()); } ///@} /// @name Hemisphere broadenings. ///@{ - const double Bmax() const { return _Bmax; } - const double Bmin() const { return _Bmin; } - const double Bsum() const { return _Bmax + _Bmin; } - const double Bdiff() const { return fabs(_Bmax - _Bmin); } // <- fabs(), just in case... + double Bmax() const { return _Bmax; } + double Bmin() const { return _Bmin; } + double Bsum() const { return _Bmax + _Bmin; } + double Bdiff() const { return fabs(_Bmax - _Bmin); } // <- fabs(), just in case... ///@} /// Is the hemisphere with the max mass the same as the one with the max broadening? 
- const bool massMatchesBroadening() { + bool massMatchesBroadening() { return _highMassEqMaxBroad; } @@ -145,6 +159,7 @@ }; + } #endif Modified: trunk/include/Rivet/Tools/Logging.hh ============================================================================== --- trunk/include/Rivet/Tools/Logging.hh Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/include/Rivet/Tools/Logging.hh Fri Dec 4 14:14:36 2009 (r2133) @@ -11,7 +11,7 @@ /// Log priority levels. enum Level { - TRACE = 0, DEBUG = 10, INFO = 20, WARN = 30, ERROR = 40 + TRACE = 0, DEBUG = 10, INFO = 20, WARN = 30, WARNING = 30, ERROR = 40 }; /// Typedef for a collection of named logs. Modified: trunk/src/Analyses/DELPHI_1996_S3430090.cc ============================================================================== --- trunk/src/Analyses/DELPHI_1996_S3430090.cc Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/src/Analyses/DELPHI_1996_S3430090.cc Fri Dec 4 14:14:36 2009 (r2133) @@ -50,6 +50,7 @@ void init() { addProjection(Beam(), "Beams"); + /// @todo pTmin and |eta| cuts const ChargedFinalState cfs; addProjection(cfs, "FS"); addProjection(UnstableFinalState(), "UFS"); Modified: trunk/src/Analyses/OPAL_2004_S6132243.cc ============================================================================== --- trunk/src/Analyses/OPAL_2004_S6132243.cc Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/src/Analyses/OPAL_2004_S6132243.cc Fri Dec 4 14:14:36 2009 (r2133) @@ -17,8 +17,9 @@ public: /// Constructor - OPAL_2004_S6132243() : Analysis("OPAL_2004_S6132243"), - _isqrts(-1), _sumPassedWeights(0.0) + OPAL_2004_S6132243() + : Analysis("OPAL_2004_S6132243"), + _isqrts(-1), _sumWTrack2(0.0), _sumWJet3(0.0) { // } @@ -52,7 +53,8 @@ void init() { // Projections addProjection(Beam(), "Beams"); - const ChargedFinalState cfs; + /// @todo pTmin and |eta| cuts + const ChargedFinalState cfs(-2, 2, 0.15*GeV); addProjection(cfs, "FS"); addProjection(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets"); addProjection(Sphericity(cfs), "Sphericity"); @@ 
-102,7 +104,7 @@ // Increment passed-cuts weight sum const double weight = event.weight(); - _sumPassedWeights += weight; + _sumWTrack2 += weight; // Thrusts const Thrust& thrust = applyProjection<Thrust>(event, "Thrust"); @@ -120,8 +122,8 @@ // Jets const FastJets& durjet = applyProjection<FastJets>(event, "DurhamJets"); if (durjet.clusterSeq()) { - /// @todo Need separate normalisation due to clusterseq / 3 jet requirement? - const double y23 = durjet.clusterSeq()->exclusive_ymerge(3); + _sumWJet3 += weight; + const double y23 = durjet.clusterSeq()->exclusive_ymerge(2); _histY23Durham[_isqrts]->fill(y23, weight); for (int n = 1; n <= 5; ++n) { _histY23DurhamMom[_isqrts]->fill(n, pow(y23, n)*weight); @@ -150,8 +152,8 @@ // Hemispheres const Hemispheres& hemi = applyProjection<Hemispheres>(event, "Hemispheres"); - const double hemi_mh = hemi.scaledM2high(); - const double hemi_ml = hemi.scaledM2low(); + const double hemi_mh = hemi.Mhigh()/sqrtS(); + const double hemi_ml = hemi.Mlow()/sqrtS(); const double hemi_bmax = hemi.Bmax(); const double hemi_bmin = hemi.Bmin(); const double hemi_bsum = hemi.Bsum(); @@ -171,33 +173,33 @@ void finalize() { - normalize(_hist1MinusT[_isqrts]); - normalize(_histTMajor[_isqrts]); - normalize(_histTMinor[_isqrts]); - normalize(_histOblateness[_isqrts]); - normalize(_histSphericity[_isqrts]); - normalize(_histAplanarity[_isqrts]); - normalize(_histHemiMassH[_isqrts]); - normalize(_histHemiMassL[_isqrts]); - normalize(_histHemiBroadW[_isqrts]); - normalize(_histHemiBroadN[_isqrts]); - normalize(_histHemiBroadT[_isqrts]); - normalize(_histCParam[_isqrts]); - normalize(_histDParam[_isqrts]); - normalize(_histY23Durham[_isqrts]); + scale(_hist1MinusT[_isqrts], 1.0/_sumWTrack2); + scale(_histTMajor[_isqrts], 1.0/_sumWTrack2); + scale(_histTMinor[_isqrts], 1.0/_sumWTrack2); + scale(_histOblateness[_isqrts], 1.0/_sumWTrack2); + scale(_histSphericity[_isqrts], 1.0/_sumWTrack2); + scale(_histAplanarity[_isqrts], 1.0/_sumWTrack2); + 
scale(_histHemiMassH[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiMassL[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiBroadW[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiBroadN[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiBroadT[_isqrts], 1.0/_sumWTrack2); + scale(_histCParam[_isqrts], 1.0/_sumWTrack2); + scale(_histDParam[_isqrts], 1.0/_sumWTrack2); + scale(_histY23Durham[_isqrts], 1.0/_sumWJet3); // - scale(_hist1MinusTMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histTMajorMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histTMinorMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histOblatenessMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histSphericityMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histHemiMassHMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histHemiMassLMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histHemiBroadWMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histHemiBroadNMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histHemiBroadTMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histCParamMom[_isqrts], 1.0/_sumPassedWeights); - scale(_histY23DurhamMom[_isqrts], 1.0/_sumPassedWeights); + scale(_hist1MinusTMom[_isqrts], 1.0/_sumWTrack2); + scale(_histTMajorMom[_isqrts], 1.0/_sumWTrack2); + scale(_histTMinorMom[_isqrts], 1.0/_sumWTrack2); + scale(_histOblatenessMom[_isqrts], 1.0/_sumWTrack2); + scale(_histSphericityMom[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiMassHMom[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiMassLMom[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiBroadWMom[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiBroadNMom[_isqrts], 1.0/_sumWTrack2); + scale(_histHemiBroadTMom[_isqrts], 1.0/_sumWTrack2); + scale(_histCParamMom[_isqrts], 1.0/_sumWTrack2); + scale(_histY23DurhamMom[_isqrts], 1.0/_sumWJet3); } //@} @@ -205,13 +207,16 @@ private: - // Beam energy index for histograms + /// Beam energy index for histograms int _isqrts; - // Counter of event weights passing the cuts - double _sumPassedWeights; + /// @name Counters of event weights 
passing the cuts + //@{ + double _sumWTrack2, _sumWJet3; + //@} - // Event shape histos at 4 energies + /// @name Event shape histos at 4 energies + //@{ AIDA::IHistogram1D* _hist1MinusT[4]; AIDA::IHistogram1D* _histHemiMassH[4]; AIDA::IHistogram1D* _histCParam[4]; @@ -226,8 +231,10 @@ AIDA::IHistogram1D* _histHemiMassL[4]; AIDA::IHistogram1D* _histHemiBroadN[4]; AIDA::IHistogram1D* _histDParam[4]; + //@} - // Event shape moment histos at 4 energies + /// @name Event shape moment histos at 4 energies + //@{ AIDA::IHistogram1D* _hist1MinusTMom[4]; AIDA::IHistogram1D* _histHemiMassHMom[4]; AIDA::IHistogram1D* _histCParamMom[4]; @@ -240,6 +247,7 @@ AIDA::IHistogram1D* _histOblatenessMom[4]; AIDA::IHistogram1D* _histHemiMassLMom[4]; AIDA::IHistogram1D* _histHemiBroadNMom[4]; + //@} }; Modified: trunk/src/Core/AnalysisHandler.cc ============================================================================== --- trunk/src/Core/AnalysisHandler.cc Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/src/Core/AnalysisHandler.cc Fri Dec 4 14:14:36 2009 (r2133) @@ -236,7 +236,7 @@ } - std::vector<std::string> AnalysisHandler::analysisNames() { + std::vector<std::string> AnalysisHandler::analysisNames() const { std::vector<std::string> rtn; foreach (Analysis* a, _analyses) { rtn.push_back(a->name()); Modified: trunk/src/Core/Run.cc ============================================================================== --- trunk/src/Core/Run.cc Fri Dec 4 01:26:26 2009 (r2132) +++ trunk/src/Core/Run.cc Fri Dec 4 14:14:36 2009 (r2133) @@ -83,6 +83,12 @@ // Check that analyses are beam-compatible const size_t num_anas_requested = _ah.analysisNames().size(); _ah.removeIncompatibleAnalyses(beams); + foreach (const Analysis* a, _ah.analyses()) { + if (toUpper(a->status()) != "VALIDATED") { + Log::getLog("Rivet.Run") << Log::WARN + << "Analysis '" << a->name() << "' is unvalidated: be careful!" 
<< endl; + } + } if (num_anas_requested > 0 && _ah.analysisNames().size() == 0) { Log::getLog("Rivet.Run") << Log::ERROR << "All analyses were incompatible with the first event's beams\n"
More information about the Rivet-svn mailing list