// -*- C++ -*-
#include <vector>
#include <algorithm>
#include <iostream>
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/IdentifiedFinalState.hh"
#include "Rivet/Jet.hh"
#include "Rivet/Projections/FastJets.hh"
#include "Rivet/Math/Vector3.hh"
#include "fastjet/internal/base.hh"
#include "fastjet/JetDefinition.hh"
#include "fastjet/AreaDefinition.hh"
#include "fastjet/ClusterSequence.hh"
#include "fastjet/ClusterSequenceArea.hh"
#include "fastjet/PseudoJet.hh"

namespace Rivet {


  /// @brief Measurement of isolated diphoton + X differential cross-sections
  ///
  /// Inclusive isolated gamma gamma cross-sections,
  /// differential in M(gg), pT(gg), dphi(gg), cos(theta*(gg)), phi*(gg), a_t(gg)
  ///
  /// @author Marco Delmastro
  class ATLAS_2012_DIPHOTONS : public Analysis {
  public:

    /// Constructor
    ATLAS_2012_DIPHOTONS()
      : Analysis("ATLAS_2012_DIPHOTONS")
    {
      _eta_bins_areaoffset += 0.0, 1.5, 3.0;
    }


  public:

    /// Book histograms and initialise projections before the run
    void init() {
      FinalState fs;
      addProjection(fs, "FS");

      FastJets fj(fs, FastJets::KT, 0.5);
      _area_def = new fastjet::AreaDefinition(fastjet::VoronoiAreaSpec());
      fj.useJetArea(_area_def);
      addProjection(fj, "KtJetsD05");

      // @FIXME: this selection is buggy! (cuts at eta > 1, does not apply the pT cut!)
      IdentifiedFinalState photonfs(Cuts::abseta < 2.37 && Cuts::pT > 30.*GeV); // 30 GeV cut on subleading photon
      //IdentifiedFinalState photonfs(-2.37, 2.37, 30.*GeV); // etamin, etamax, ptmin -- this seems to work fine!
      photonfs.acceptId(PID::PHOTON);
      addProjection(photonfs, "Photon");

      //_h_M     = bookHisto1D(1, 1, 1);
      //_h_pT    = bookHisto1D(2, 1, 1);
      //_h_dPhi  = bookHisto1D(3, 1, 1);
      //_h_costh = bookHisto1D(4, 1, 1);

      // Explicit histogram definition, since the results are not read from the reference data yet
      // NB: the histogram names and the binnings below (other than the Mgg bin edges) are placeholders
      static const int nbins_Mgg = 28;
      double binMgg[nbins_Mgg] = { 0., 20., 30., 40., 50., 60., 70., 80., 90., 100.,
                                   110., 120., 130., 140., 150., 160., 170., 180., 190., 200.,
                                   225., 250., 275., 300., 350., 400., 500., 800. };
      std::vector<double> binMgg_(nbins_Mgg);
      for (int i = 0; i < nbins_Mgg; ++i) binMgg_[i] = binMgg[i];
      _h_M       = bookHisto1D("Mgg", binMgg_);
      _h_pT      = bookHisto1D("pTgg", 50, 0., 500.);
      _h_dPhi    = bookHisto1D("dPhigg", 32, 0., PI);
      _h_costh   = bookHisto1D("costhetastargg", 40, -1., 1.);
      _h_phistar = bookHisto1D("phistargg", 50, 0., 10.);
      _h_at      = bookHisto1D("atgg", 50, 0., 500.);
      _h_ETg1    = bookHisto1D("ETgamma1", 50, 0., 500.);
      _h_ETg2    = bookHisto1D("ETgamma2", 50, 0., 500.);
      _h_etag1   = bookHisto1D("etagamma1", 48, -2.4, 2.4);
      _h_etag2   = bookHisto1D("etagamma2", 48, -2.4, 2.4);
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {
      const double weight = event.weight();

      // Require at least two photons in the acceptance
      Particles photons = applyProjection<IdentifiedFinalState>(event, "Photon").particlesByPt();
      if (photons.size() < 2) vetoEvent;

      // Compute the median energy density in each |eta| bin
      _ptDensity.clear();
      _sigma.clear();
      _Njets.clear();
      std::vector< std::vector<double> > ptDensities;
      std::vector<double> emptyVec;
      ptDensities.assign(_eta_bins_areaoffset.size()-1, emptyVec);

      // Get jets, and corresponding jet areas
      const fastjet::ClusterSequenceArea* clust_seq_area =
        applyProjection<FastJets>(event, "KtJetsD05").clusterSeqArea();
      foreach (const fastjet::PseudoJet& jet, applyProjection<FastJets>(event, "KtJetsD05").pseudoJets(0.0*GeV)) {
        const double aeta = fabs(jet.eta());
        const double pt = jet.perp();
        const double area = clust_seq_area->area(jet);
        if (area < 1e-3) continue;
        const int ieta = binIndex(aeta, _eta_bins_areaoffset);
        if (ieta != -1) ptDensities[ieta].push_back(pt/area);
      }

      // Compute median jet properties over the jets in the event
      for (size_t b = 0; b < _eta_bins_areaoffset.size()-1; ++b) {
        double median = 0.0;
        double sigma = 0.0;
        int Njets = 0;
        if (ptDensities[b].size() > 0) {
          std::sort(ptDensities[b].begin(), ptDensities[b].end());
          int nDens = ptDensities[b].size();
          median = (nDens % 2 == 0) ?
            (ptDensities[b][nDens/2] + ptDensities[b][(nDens-2)/2]) / 2 :
            ptDensities[b][(nDens-1)/2];
          sigma = ptDensities[b][(int)(.15865*nDens)];
          Njets = nDens;
        }
        _ptDensity.push_back(median);
        _sigma.push_back(sigma);
        _Njets.push_back(Njets);
      }

      // Loop over photons and fill a vector of the isolated ones
      Particles isolated_photons;
      foreach (const Particle& photon, photons) {
        // Remove photons in the ECAL crack region
        if (inRange(photon.abseta(), 1.37, 1.52)) continue;

        const double eta_P = photon.eta();
        const double phi_P = photon.phi();
        //const double pt_P = photon.pt();
        //std::cout << "*** Eta = " << eta_P << " - Pt[GeV] = " << pt_P << std::endl;

        // Compute isolation via particles within an R = 0.4 cone of the photon
        Particles fs = applyProjection<FinalState>(event, "FS").particles();
        FourMomentum mom_in_EtCone;
        foreach (const Particle& p, fs) {
          // Reject if not in the cone
          if (deltaR(photon.momentum(), p.momentum()) > 0.4) continue;
          // Reject if in the 5x7 cell central core
          if (fabs(eta_P - p.eta()) < 0.025 * 5 * 0.5 &&
              fabs(phi_P - p.phi()) < PI/128. * 7 * 0.5) continue;
          // Sum momentum
          mom_in_EtCone += p.momentum();
        }

        // Now figure out the correction (area * median density)
        const double EtCone_area = PI*sqr(0.4) - (7*.025)*(5*PI/128.); // cone area - central core rectangle
        const double correction = _ptDensity[binIndex(fabs(eta_P), _eta_bins_areaoffset)] * EtCone_area;

        // Discard the photon if there is more than 9 GeV of corrected cone activity
        // NOTE: no need to subtract the photon itself (it is inside the central core)
        // NOTE: using the expected cut at hadron/particle level, not at reco level
        if (mom_in_EtCone.Et() - correction > 9*GeV) continue;

        // Add the isolated photon to the list
        isolated_photons.push_back(photon);
      }

      // Require at least two isolated photons
      if (isolated_photons.size() < 2) vetoEvent;

      // Select the leading-pT pair
      std::sort(isolated_photons.begin(), isolated_photons.end(), cmpMomByPt);
      const FourMomentum& y1 = isolated_photons[0].momentum();
      const FourMomentum& y2 = isolated_photons[1].momentum();

      // Leading photon should have pT > 40 GeV, subleading > 30 GeV
      if (y1.pT() < 40.*GeV) vetoEvent;
      if (y2.pT() < 30.*GeV) vetoEvent;

      // Require the two photons to be separated (dR > 0.4)
      // (this shouldn't be needed anymore, since the initial selection seems to work)
      if (deltaR(y1, y2) < 0.4) vetoEvent;

      //double eta1 = y1.eta();
      //double eta2 = y2.eta();
      //double phi1 = y1.phi();
      //double phi2 = y2.phi();
      //std::cout << eta1 << " " << eta2 << " " << phi1 << " " << phi2 << std::endl;

      // Diphoton observables
      const FourMomentum yy = y1 + y2;
      const double Myy  = yy.mass();
      const double pTyy = yy.pT();
      const double dPhiyy = mapAngle0ToPi(y1.phi() - y2.phi());

      // cos(theta*) in the Collins-Soper-like frame
      const double costhetayy = 2 * y1.pT() * y2.pT() * sinh(y1.eta() - y2.eta()) / Myy / add_quad(Myy, pTyy);

      // phi*
      const double costhetastar_ = fabs(tanh((y1.eta() - y2.eta()) / 2.));
      const double sinthetastar_ = sqrt(1 - sqr(costhetastar_));
      const double phistar = tan((PI - dPhiyy)/2.) * sinthetastar_;
      // a_t: component of the diphoton transverse momentum perpendicular to the thrust axis t_hat
      Vector3 t_hat(y1.x() - y2.x(), y1.y() - y2.y(), 0.);
      // Normalise t_hat to unit modulus
      const double factor = t_hat.mod();
      t_hat.setX(t_hat.x() / factor);
      t_hat.setY(t_hat.y() / factor);
      t_hat.setZ(t_hat.z() / factor);
      Vector3 At(y1.x() + y2.x(), y1.y() + y2.y(), 0.);
      // Compute the component of At transverse to t_hat
      double tot = t_hat.mod2();
      double ss = At.dot(t_hat);
      double per = At.mod2();
      if (tot > 0.0) per -= ss*ss/tot;
      if (per < 0) per = 0;
      const double at = sqrt(per); // a_t is the magnitude of the transverse component, not its square
      //std::cout << "A_t = " << at << std::endl;

      // Fill histograms
      _h_M->fill(Myy, weight);
      _h_pT->fill(pTyy, weight);
      _h_dPhi->fill(dPhiyy, weight);
      _h_costh->fill(costhetayy, weight);
      _h_phistar->fill(phistar, weight);
      _h_at->fill(at, weight);
      _h_ETg1->fill(y1.pT(), weight);
      _h_ETg2->fill(y2.pT(), weight);
      _h_etag1->fill(y1.eta(), weight);
      _h_etag2->fill(y2.eta(), weight);
    }


    /// Normalise histograms etc., after the run
    void finalize() {
      const double sf = crossSection()/sumOfWeights();
      scale(_h_M, sf);
      scale(_h_pT, sf);
      scale(_h_dPhi, sf);
      scale(_h_costh, sf);
      scale(_h_phistar, sf);
      scale(_h_at, sf);
      scale(_h_ETg1, sf);
      scale(_h_ETg2, sf);
      scale(_h_etag1, sf);
      scale(_h_etag2, sf);
    }


  private:

    Histo1DPtr _h_ETg1;
    Histo1DPtr _h_ETg2;
    Histo1DPtr _h_etag1;
    Histo1DPtr _h_etag2;
    Histo1DPtr _h_M;
    Histo1DPtr _h_pT;
    Histo1DPtr _h_dPhi;
    Histo1DPtr _h_costh;
    Histo1DPtr _h_phistar;
    Histo1DPtr _h_at;

    fastjet::AreaDefinition* _area_def;

    std::vector<double> _eta_bins;
    std::vector<double> _eta_bins_areaoffset;
    std::vector<double> _ptDensity;
    std::vector<double> _sigma;
    std::vector<int> _Njets;

  };


  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(ATLAS_2012_DIPHOTONS);

}