|
[Rivet-svn] r2148 - in trunk: bin data/anainfo src/Analyses — blackhole at projects.hepforge.org — Tue Dec 8 15:35:46 GMT 2009
Author: buckley Date: Tue Dec 8 15:35:45 2009 New Revision: 2148 Log: UA5 1988 improvements: Py6 doesn't match data Modified: trunk/bin/gap_removal trunk/data/anainfo/UA5_1988_S1867512.info trunk/src/Analyses/UA5_1988_S1867512.cc trunk/src/Analyses/UA5_1989_S1926373.cc Modified: trunk/bin/gap_removal ============================================================================== --- trunk/bin/gap_removal Tue Dec 8 14:04:50 2009 (r2147) +++ trunk/bin/gap_removal Tue Dec 8 15:35:45 2009 (r2148) @@ -16,9 +16,11 @@ self.histos[title] = Histogram(f) f.close() + class Histogram: def __init__(self, f): self.read_input(f) + def read_input(self, f): self.description = {} self.data = [] @@ -43,35 +45,39 @@ 'UpEdge': float(linearray[1]), 'Content': float(linearray[2]), 'Error': [float(linearray[3]),float(linearray[4])]}) + def write_datapoint(self, f, xval, xerr, yval, yerr): - f.write(' <dataPoint>\n') - f.write(' <measurement errorPlus="%e" value="%e" errorMinus="%e"/>\n' %(xerr, xval, xerr)) - f.write(' <measurement errorPlus="%e" value="%e" errorMinus="%e"/>\n' %(yerr[1], yval, yerr[0])) - f.write(' </dataPoint>\n') + f.write(' <dataPoint>\n') + f.write(' <measurement errorPlus="%e" value="%e" errorMinus="%e"/>\n' %(xerr, xval, xerr)) + f.write(' <measurement errorPlus="%e" value="%e" errorMinus="%e"/>\n' %(yerr[1], yval, yerr[0])) + f.write(' </dataPoint>\n') + def write_datapointset_header(self, f): - path = self.description['AidaPath'] - title = self.description['Title'].replace('>', '>').replace('<', '<').replace('"', '"') - try: - xlabel = self.description['XLabel'].replace('>', '>').replace('<', '<').replace('"', '"') - except: - xlabel = '' - try: - ylabel = self.description['YLabel'].replace('>', '>').replace('<', '<').replace('"', '"') - except: - ylabel = '' - f.write(' <dataPointSet name="%s" dimension="2"\n' % path.split('/')[-1]) - f.write(' path="%s" title="%s">\n' %(os.path.abspath(path.replace(path.split('/')[-1], '')), title)) - f.write(' <annotation>\n') 
- f.write(' <item key="Title" value="%s" sticky="true"/>\n' %title) - f.write(' <item key="XLabel" value="%s" sticky="true"/>\n' %xlabel) - f.write(' <item key="YLabel" value="%s" sticky="true"/>\n' %ylabel) - f.write(' <item key="AidaPath" value="%s" sticky="true"/>\n' %(path)) - f.write(' <item key="FullPath" value="/%s.aida%s" sticky="true"/>\n' %(filename.split('/')[-1], path)) - f.write(' </annotation>\n') - f.write(' <dimension dim="0" title="%s" />\n' %xlabel) - f.write(' <dimension dim="1" title="%s" />\n' %ylabel) + title = self.description.setdefault('Title', None) + xlabel = self.description.setdefault('XLabel', None) + ylabel = self.description.setdefault('YLabel', None) + path = self.description.setdefault('AidaPath', None) + if path is not None: + path = path.replace('>', '>').replace('<', '<').replace('"', '"') + f.write(' <dataPointSet name="%s" dimension="2"\n' % path.split('/')[-1]) + f.write(' path="%s" title="%s">\n' % (os.path.abspath(path.replace(path.split('/')[-1], '')), + title.replace('>', '>').replace('<', '<').replace('"', '"'))) + f.write(' <annotation>\n') + if title is not None: + f.write(' <item key="Title" value="%s" sticky="true"/>\n' % title.replace('>', '>').replace('<', '<').replace('"', '"')) + if xlabel is not None: + f.write(' <item key="XLabel" value="%s" sticky="true"/>\n' % xlabel.replace('>', '>').replace('<', '<').replace('"', '"')) + if ylabel is not None: + f.write(' <item key="YLabel" value="%s" sticky="true"/>\n' % ylabel.replace('>', '>').replace('<', '<').replace('"', '"')) + f.write(' <item key="AidaPath" value="%s" sticky="true"/>\n' % path) + f.write(' <item key="FullPath" value="/%s.aida%s" sticky="true"/>\n' % (filename.split('/')[-1], path)) + f.write(' </annotation>\n') + f.write(' <dimension dim="0" title="%s" />\n' % xlabel) + f.write(' <dimension dim="1" title="%s" />\n' % ylabel) + def write_datapointset_footer(self, f): - f.write(' </dataPointSet>\n') + f.write(' </dataPointSet>\n') + def 
write_datapointset(self, f): self.write_datapointset_header(f) for bin, bindata in enumerate(self.data): @@ -84,6 +90,7 @@ yerr = bindata['Error'] self.write_datapoint(f, xval, xerr, yval, yerr) self.write_datapointset_footer(f) + def remove_gaps(self): # only look at histograms which are present in the reference file: try: Modified: trunk/data/anainfo/UA5_1988_S1867512.info ============================================================================== --- trunk/data/anainfo/UA5_1988_S1867512.info Tue Dec 8 14:04:50 2009 (r2147) +++ trunk/data/anainfo/UA5_1988_S1867512.info Tue Dec 8 15:35:45 2009 (r2148) @@ -1,6 +1,6 @@ Name: UA5_1988_S1867512 Year: 1988 -Summary: Charged particle correlations in non-single-diffractive events (ppbar) of the UA5 detector measured at c.m. energies of 200, 546 and 900 GeV. +Summary: Charged particle correlations in ppbar non-single-diffractive events of the UA5 detector at sqrt(s) = 200, 546 and 900 GeV. Experiment: UA5 Collider: CERN SPS SpiresID: 1867512 @@ -10,8 +10,21 @@ References: - Z.Phys.C37:191-213,1988 RunInfo: - PPbar events, non-single diffractive events need to be switched on. The trigger implementation is the same as in UA5_1989_S1926373. + ppbar events: non-single diffractive events need to be switched on. The trigger + implementation is the same as in UA5_1989_S1926373. NumEvents: 1000000 PtCuts: [0] Description: - We present data on two-particle pseudorapidity and multiplicity correlations of charged particles for non single-diffractive $$p\bar p - collisions$$ at c.m. energies of 200, 546 and 900 GeV. Pseudorapidity correlations interpreted in terms of a cluster model, which has been motivated by this and other experiments, require on average about two charged particles per cluster. The decay width of the clusters in pseudorapidity is approximately independent of multiplicity and of c.m. energy. The investigations of correlations in terms of pseudorapidity gaps confirm the picture of cluster production. 
The strength of forward-backward multiplicity correlations increases linearly with ins and depends strongly on position and size of the pseudorapidity gap separating the forward and backward interval. All our correlation studies can be understood in terms of a cluster model in which clusters contain on average about two charged particles, i.e. are of similar magnitude to earlier est imates from the ISR. + Data on two-particle pseudorapidity and multiplicity correlations + of charged particles for non single-diffractive $$p\bar p - collisions$$ at + c.m. energies of 200, 546 and 900 GeV. Pseudorapidity correlations interpreted + in terms of a cluster model, which has been motivated by this and other + experiments, require on average about two charged particles per cluster. + The decay width of the clusters in pseudorapidity is approximately independent + of multiplicity and of c.m. energy. The investigations of correlations in terms + of pseudorapidity gaps confirm the picture of cluster production. The strength + of forward-backward multiplicity correlations increases linearly with ins and + depends strongly on position and size of the pseudorapidity gap separating + the forward and backward interval. All our correlation studies can be understood + in terms of a cluster model in which clusters contain on average about two + charged particles, i.e. are of similar magnitude to earlier estimates from the ISR. 
Modified: trunk/src/Analyses/UA5_1988_S1867512.cc ============================================================================== --- trunk/src/Analyses/UA5_1988_S1867512.cc Tue Dec 8 14:04:50 2009 (r2147) +++ trunk/src/Analyses/UA5_1988_S1867512.cc Tue Dec 8 15:35:45 2009 (r2148) @@ -7,6 +7,13 @@ #include "Rivet/Projections/TriggerUA5.hh" namespace Rivet { + + + namespace { + inline double cov_w_mean(int m, double m_mean, int n, double n_mean) { + return (m - m_mean)*(n - n_mean); + } + } class UA5_1988_S1867512 : public Analysis { @@ -15,13 +22,9 @@ UA5_1988_S1867512() : Analysis("UA5_1988_S1867512") { setBeams(PROTON, ANTIPROTON); + _sumWPassed = 0; } - - - inline double cov_w_mean(int m, double m_mean, int n, double n_mean) { - return (m - m_mean)*(n - n_mean); - } - + /// Calculate the correlation strength between two samples inline double c_str(int m, double m_mean, int n, double n_mean) { @@ -32,10 +35,13 @@ const double corr_strength = correlation*sqrt(var2/var1); return corr_strength; } + + /// @name Analysis methods //@{ void init() { + // Projections addProjection(TriggerUA5(), "Trigger"); addProjection(Beam(), "Beams"); @@ -62,24 +68,25 @@ addProjection(ChargedFinalState(-4.0, -3.0), "CFS40B"); // Histogram booking, we have sqrt(s) = 200, 546 and 900 GeV - _hist_correl_200 = bookProfile1D(2, 1, 1); - _hist_correl_546 = bookProfile1D(2, 1, 2); - _hist_correl_900 = bookProfile1D(2, 1, 3); - - _hist_correl_asym_200 = bookProfile1D(3, 1, 1); - _hist_correl_asym_546 = bookProfile1D(3, 1, 2); - _hist_correl_asym_900 = bookProfile1D(3, 1, 3); + if (fuzzyEquals(sqrtS(), 200.0, 1E-4)) { + _hist_correl_200 = bookProfile1D(2, 1, 1); + _hist_correl_asym_200 = bookProfile1D(3, 1, 1); + } else if (fuzzyEquals(sqrtS(), 546.0, 1E-4)) { + _hist_correl_546 = bookProfile1D(2, 1, 2); + _hist_correl_asym_546 = bookProfile1D(3, 1, 2); + } else if (fuzzyEquals(sqrtS(), 900.0, 1E-4)) { + _hist_correl_900 = bookProfile1D(2, 1, 3); + _hist_correl_asym_900 = bookProfile1D(3, 1, 
3); + } } - void analyze(const Event& event) { - sqrtS = applyProjection<Beam>(event, "Beams").sqrtS(); - + void analyze(const Event& event) { // Trigger const bool trigger = applyProjection<TriggerUA5>(event, "Trigger").nsdDecision(); if (!trigger) vetoEvent; - + _sumWPassed += event.weight(); // Count forward/backward particles n_10f += applyProjection<ChargedFinalState>(event, "CFS10F").size(); @@ -99,7 +106,6 @@ n_40b += applyProjection<ChargedFinalState>(event, "CFS40B").size(); // n_05 += applyProjection<ChargedFinalState>(event, "CFS05").size(); - } @@ -121,11 +127,11 @@ double mean_n_35b = mean(n_35b); double mean_n_40b = mean(n_40b); - double mean_n_05 = mean(n_05) ; + double mean_n_05 = mean(n_05); // Fill histos - if (fuzzyEquals(sqrtS, 200.0, 1E-4)) { + if (fuzzyEquals(sqrtS(), 200.0, 1E-4)) { for (size_t i = 0; i < n_10f.size(); i++) { // Fill gap size histo (Fig 14), iterate over central gap size _hist_correl_200->fill(0.0, c_str(n_10f[i], mean_n_10f, n_10b[i], mean_n_10b)); @@ -155,7 +161,7 @@ } } - else if (fuzzyEquals(sqrtS, 546.0, 1E-4)) { + else if (fuzzyEquals(sqrtS(), 546.0, 1E-4)) { for (size_t i = 0; i < n_10f.size(); i++) { _hist_correl_546->fill(0.0, c_str(n_10f[i], mean_n_10f, n_10b[i], mean_n_10b)); _hist_correl_546->fill(1.0, c_str(n_15f[i], mean_n_15f, n_15b[i], mean_n_15b)); @@ -180,7 +186,7 @@ } } - else if (fuzzyEquals(sqrtS, 900.0, 1E-4)) { + else if (fuzzyEquals(sqrtS(), 900.0, 1E-4)) { for (size_t i = 0; i < n_10f.size(); i++) { _hist_correl_900->fill(0.0, c_str(n_10f[i], mean_n_10f, n_10b[i], mean_n_10b)); _hist_correl_900->fill(1.0, c_str(n_15f[i], mean_n_15f, n_15b[i], mean_n_15b)); @@ -213,13 +219,15 @@ private: - // CoM energy - double sqrtS; + /// @name Counters + //@{ + double _sumWPassed; + //@} + /// @name Vectors for storing the number of particles in the different eta intervals per event. /// @todo Is there a better way? 
- //@{ - + //@{ std::vector<int> n_10f; std::vector<int> n_15f; std::vector<int> n_20f; @@ -227,7 +235,7 @@ std::vector<int> n_30f; std::vector<int> n_35f; std::vector<int> n_40f; - + // std::vector<int> n_10b; std::vector<int> n_15b; std::vector<int> n_20b; @@ -235,9 +243,8 @@ std::vector<int> n_30b; std::vector<int> n_35b; std::vector<int> n_40b; - + // std::vector<int> n_05; - //@} Modified: trunk/src/Analyses/UA5_1989_S1926373.cc ============================================================================== --- trunk/src/Analyses/UA5_1989_S1926373.cc Tue Dec 8 14:04:50 2009 (r2147) +++ trunk/src/Analyses/UA5_1989_S1926373.cc Tue Dec 8 15:35:45 2009 (r2148) @@ -15,7 +15,7 @@ /// Constructor UA5_1989_S1926373() : Analysis("UA5_1989_S1926373") { setBeams(PROTON, ANTIPROTON); - _numVetoed = 0; + _sumWPassed = 0; } @@ -57,6 +57,7 @@ const double sqrtS = applyProjection<Beam>(event, "Beams").sqrtS(); const double weight = event.weight(); + _sumWPassed += weight; // Count final state particles in several eta regions const int numP05 = applyProjection<ChargedFinalState>(event, "CFS05").size(); @@ -86,35 +87,32 @@ void finalize() { - // Normalise to area of refhistos - /// @todo Use generator cross-sections - normalize(_hist_nch200, 2.011); - normalize(_hist_nch900, 2.0434); - normalize(_hist_nch200eta05, 1.01255); - normalize(_hist_nch200eta15, 1.0191); - normalize(_hist_nch200eta30, 1.02615); - normalize(_hist_nch200eta50, 1.03475); - normalize(_hist_nch900eta05, 1.0035); - normalize(_hist_nch900eta15, 1.01405); - normalize(_hist_nch900eta30, 1.03055); - normalize(_hist_nch900eta50, 1.02791); - // Scale to total number of weights - scale(_hist_mean_nch_200, 1.0/sumOfWeights()); - scale(_hist_mean_nch_900, 1.0/sumOfWeights()); - - // Print trigger statistics - getLog() << Log::INFO << "No. events vetoed: " << _numVetoed << endl; - getLog() << Log::INFO << "No. 
events accepted: " << sumOfWeights() - _numVetoed << endl; - getLog() << Log::INFO << "Relative trigger rate: " << 100.0*(sumOfWeights() - _numVetoed)/sumOfWeights() << "%" << endl; + scale(_hist_nch200, _sumWPassed); + scale(_hist_nch900, _sumWPassed); + scale(_hist_nch200eta05, _sumWPassed); + scale(_hist_nch200eta15, _sumWPassed); + scale(_hist_nch200eta30, _sumWPassed); + scale(_hist_nch200eta50, _sumWPassed); + scale(_hist_nch900eta05, _sumWPassed); + scale(_hist_nch900eta15, _sumWPassed); + scale(_hist_nch900eta30, _sumWPassed); + scale(_hist_nch900eta50, _sumWPassed); + scale(_hist_mean_nch_200, 1.0/_sumWPassed); + scale(_hist_mean_nch_900, 1.0/_sumWPassed); } //@} private: - + + /// @name Counters + //@{ + double _sumWPassed; + //@} + + /// @name Histograms //@{ - /// Histograms AIDA::IHistogram1D* _hist_nch200; AIDA::IHistogram1D* _hist_nch900; AIDA::IHistogram1D* _hist_nch200eta05; @@ -129,7 +127,6 @@ AIDA::IHistogram1D* _hist_mean_nch_900; //@} - unsigned int _numVetoed; };
More information about the Rivet-svn mailing list