ra4_macros  bede988c286599a3a84b77a4d788ac0a971e89f9
make_csv_template.cxx
// make_csv_template.cxx: Makes CSV discriminant templates for jets passing
// the CSV requirement. Used for reweighting the flavor composition of QCD MC.

#include <iostream>
#include <vector>
#include "TChain.h"
#include "TH1D.h"
#include "TCanvas.h"
#include "TFile.h"
#include "TLegend.h"
#include "TLine.h"
#include "TString.h"
#include "TColor.h"

#include "styles.hpp"
#include "utilities.hpp"
#include "utilities_macros.hpp"
#include "utilities_macros_rpv.hpp"

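// Binning of the CSV discriminant templates: 22 bins of jets_csv from the
// CSV requirement at 0.89 up to 1.0.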
namespace {
  const int nBins=22;
  const float xMin=0.89;
  const float xMax=1.0;
  const TString csvVar("jets_csv");
}

void makeCSVHist(TFile *file, const std::vector<TString>& samples, const TString& name, const TString& extracut);

int main(){

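  // Input ntuples: QCD MC (whose flavor composition gets reweighted), the
  // remaining simulated backgrounds, and JetHT data.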
  std::vector<TString> s_qcd;
  s_qcd.push_back(filestring("QCD_HT1000to1500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_qcd.push_back(filestring("QCD_HT1000to1500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1"));
  s_qcd.push_back(filestring("QCD_HT1500to2000_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_qcd.push_back(filestring("QCD_HT1500to2000_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1"));
  s_qcd.push_back(filestring("QCD_HT2000toInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_qcd.push_back(filestring("QCD_HT2000toInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1"));
  // The CSV reweighting only affects QCD; the flavor composition of the other
  // samples is well defined and does not need correction.
  std::vector<TString> s_other;
  s_other.push_back(filestring("TTJets_DiLept_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_other.push_back(filestring("TTJets_DiLept_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1"));
  s_other.push_back(filestring("TTJets_SingleLeptFromT_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_other.push_back(filestring("TTJets_SingleLeptFromT_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1"));
  s_other.push_back(filestring("TTJets_SingleLeptFromTbar_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_other.push_back(filestring("TTJets_SingleLeptFromTbar_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_ext1"));
  // this dataset is skimmed to remove the non-hadronic component
  s_other.push_back(filestring("TTJets_TuneCUETP8M1_13TeV-madgraphMLM", true));
  // W/Z
  s_other.push_back(filestring("WJetsToQQ_HT-600ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_other.push_back(filestring("WJetsToLNu_HT-600ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  s_other.push_back(filestring("ZJetsToQQ_HT600toInf_13TeV-madgraph"));
  s_other.push_back(filestring("DYJetsToLL_M-50_HT-600toInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8"));
  // single top
  s_other.push_back(filestring("ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1"));
  s_other.push_back(filestring("ST_t-channel_antitop_4f_leptonDecays_13TeV-powheg-pythia8_TuneCUETP8M1"));
  s_other.push_back(filestring("ST_t-channel_top_4f_leptonDecays_13TeV-powheg-pythia8_TuneCUETP8M1"));
  s_other.push_back(filestring("ST_tW_antitop_5f_inclusiveDecays_13TeV-powheg-pythia8_TuneCUETP8M1"));
  s_other.push_back(filestring("ST_tW_top_5f_inclusiveDecays_13TeV-powheg-pythia8_TuneCUETP8M1"));
  // ttbar(W,Z,H,ttbar)
  s_other.push_back(filestring("TTWJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8"));
  s_other.push_back(filestring("TTWJetsToQQ_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8"));
  s_other.push_back(filestring("TTZToQQ_TuneCUETP8M1_13TeV-amcatnlo-pythia8"));
  s_other.push_back(filestring("TTZToLLNuNu_M-10_TuneCUETP8M1_13TeV-amcatnlo-pythia8"));
  s_other.push_back(filestring("ttHJetTobb_M125_13TeV_amcatnloFXFX_madspin_pythia8"));
  s_other.push_back(filestring("TTTT_TuneCUETP8M1_13TeV-amcatnlo-pythia8"));
  std::vector<TString> s_jetht;
  s_jetht.push_back(filestring("JetHT_Run2015C_25ns-05Oct2015-v1"));
  s_jetht.push_back(filestring("JetHT_Run2015D-05Oct2015-v1"));
  s_jetht.push_back(filestring("JetHT_Run2015D-PromptReco-v4"));

  // Reading ntuples
  std::vector<sfeats> Samples;
  Samples.push_back(sfeats(s_jetht, "Data", kBlack, 1, "(trig[11] || trig[12]) && pass && njets<10"));
  Samples.back().isData = true;
  Samples.back().doStack = false;

  // Adding non-skimmed samples
  std::vector<int> rpv_sam;
  unsigned nsam(Samples.size());
  for(unsigned sam(0); sam < nsam; sam++){
    rpv_sam.push_back(sam);
    std::vector<TString> sam_files = Samples[sam].file;
    Samples.push_back(sfeats(sam_files, Samples[sam].label, Samples[sam].color, Samples[sam].style,
                             Samples[sam].cut));
  } // Loop over samples

  std::vector<hfeats> vars;

  TString cuts("(nmus+nels)==0&&ht>1500&&nbm>=2&&njets>=6&&njets<10&&mj>800");
  vars.push_back(hfeats(csvVar, nBins, xMin, xMax, rpv_sam, "(CSV-CSV_{cut})/(CSV_{max}-CSV_{cut})", cuts));
  vars.back().normalize = true;

  // the old method produced per-jet weights rather than per-event weights
  bool useOldMethod=false;
  std::string outFile("csv_newmethod.root");
  if(useOldMethod) outFile="csv_oldmethod.root";

  TFile *out = new TFile(outFile.c_str(), "recreate");

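  // For each kinematic region, build a data template, QCD templates split by
  // flavor (b, c, light), and a combined "other" background template. The old
  // method classifies individual jets via jets_hflavor (per-jet weights) with
  // an njets boundary of 10; the new method classifies whole events by their
  // b/c-jet content via Sum$ (per-event weights) with an njets boundary of 8.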
  if(useOldMethod) {
    makeCSVHist(out, s_jetht, "data_low_njet", "(trig[11] || trig[12])&&njets<10");
    makeCSVHist(out, s_qcd, "QCD_b_low_njet", "jets_hflavor==5 && njets<10");
    makeCSVHist(out, s_qcd, "QCD_c_low_njet", "jets_hflavor==4 && njets<10");
    makeCSVHist(out, s_qcd, "QCD_l_low_njet", "jets_hflavor==0 && njets<10");
    makeCSVHist(out, s_other, "other_low_njet", "njets<10");

    makeCSVHist(out, s_jetht, "data_high_njet", "(trig[11] || trig[12])&&njets>=10");
    makeCSVHist(out, s_qcd, "QCD_b_high_njet", "jets_hflavor==5 && njets>=10");
    makeCSVHist(out, s_qcd, "QCD_c_high_njet", "jets_hflavor==4 && njets>=10");
    makeCSVHist(out, s_qcd, "QCD_l_high_njet", "jets_hflavor==0 && njets>=10");
    makeCSVHist(out, s_other, "other_high_njet", "njets>=10");

    makeCSVHist(out, s_jetht, "data_low_njet_low_mj", "(trig[11] || trig[12])&&njets<10 && mj>500 && mj<800");
    makeCSVHist(out, s_qcd, "QCD_b_low_njet_low_mj", "jets_hflavor==5 && njets<10 && mj>500 && mj<800");
    makeCSVHist(out, s_qcd, "QCD_c_low_njet_low_mj", "jets_hflavor==4 && njets<10 && mj>500 && mj<800");
    makeCSVHist(out, s_qcd, "QCD_l_low_njet_low_mj", "jets_hflavor==0 && njets<10 && mj>500 && mj<800");
    makeCSVHist(out, s_other, "other_low_njet_low_mj", "njets<10 && mj>500 && mj<800");

    makeCSVHist(out, s_jetht, "data_low_njet_high_mj", "(trig[11] || trig[12])&&njets<10 && mj>500 && mj>=800");
    makeCSVHist(out, s_qcd, "QCD_b_low_njet_high_mj", "jets_hflavor==5 && njets<10 && mj>=800");
    makeCSVHist(out, s_qcd, "QCD_c_low_njet_high_mj", "jets_hflavor==4 && njets<10 && mj>=800");
    makeCSVHist(out, s_qcd, "QCD_l_low_njet_high_mj", "jets_hflavor==0 && njets<10 && mj>=800");
    makeCSVHist(out, s_other, "other_low_njet_high_mj", "njets<10 && mj>=800");
  }
  else {
    makeCSVHist(out, s_jetht, "data_low_njet", "(trig[11] || trig[12])&&njets<8");
    makeCSVHist(out, s_qcd, "QCD_b_low_njet", "Sum$(abs(jets_hflavor)==5)>0 && njets<8");
    makeCSVHist(out, s_qcd, "QCD_c_low_njet", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)>0 && njets<8");
    makeCSVHist(out, s_qcd, "QCD_l_low_njet", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)==0 && njets<8");
    makeCSVHist(out, s_other, "other_low_njet", "njets<8");

    makeCSVHist(out, s_jetht, "data_high_njet", "(trig[11] || trig[12])&&njets>=8");
    makeCSVHist(out, s_qcd, "QCD_b_high_njet", "Sum$(abs(jets_hflavor)==5)>0 && njets>=8");
    makeCSVHist(out, s_qcd, "QCD_c_high_njet", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)>0 && njets>=8");
    makeCSVHist(out, s_qcd, "QCD_l_high_njet", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)==0 && njets>=8");
    makeCSVHist(out, s_other, "other_high_njet", "njets>=8");

    makeCSVHist(out, s_jetht, "data_low_njet_low_mj", "(trig[11] || trig[12])&&njets<8 && mj>500 && mj<800");
    makeCSVHist(out, s_qcd, "QCD_b_low_njet_low_mj", "Sum$(abs(jets_hflavor)==5)>0 && njets<8 && mj>500 && mj<800");
    makeCSVHist(out, s_qcd, "QCD_c_low_njet_low_mj", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)>0 && njets<8 && mj>500 && mj<800");
    makeCSVHist(out, s_qcd, "QCD_l_low_njet_low_mj", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)==0 && njets<8 && mj>500 && mj<800");
    makeCSVHist(out, s_other, "other_low_njet_low_mj", "njets<8 && mj>500 && mj<800");

    makeCSVHist(out, s_jetht, "data_low_njet_high_mj", "(trig[11] || trig[12])&&njets<8 && mj>500 && mj>=800");
    makeCSVHist(out, s_qcd, "QCD_b_low_njet_high_mj", "Sum$(abs(jets_hflavor)==5)>0 && njets<8 && mj>=800");
    makeCSVHist(out, s_qcd, "QCD_c_low_njet_high_mj", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)>0 && njets<8 && mj>=800");
    makeCSVHist(out, s_qcd, "QCD_l_low_njet_high_mj", "Sum$(abs(jets_hflavor)==5)==0 && Sum$(abs(jets_hflavor)==4)==0 && njets<8 && mj>=800");
    makeCSVHist(out, s_other, "other_low_njet_high_mj", "njets<8 && mj>=800");
  }
  out->Close();
}

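// Project the CSV discriminant of jets in events passing the baseline selection
// (plus extracut) from the listed samples into three histograms written to file:
// the weighted template, the raw MC entry counts, and the per-bin weights
// (weighted/raw) needed for the Barlow-Beeston treatment.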
void makeCSVHist(TFile *file, const std::vector<TString>& samples, const TString& name, const TString& extracut)
{

  std::cout << "Making histograms " << name << " for cut " << extracut << std::endl;

  TChain *ch = new TChain("tree");
  for(auto isample : samples) {
    ch->Add(isample);
  }

  TString cut("(nmus+nels)==0&&ht>1500&&nbm>=2&&njets>=6");
  TString weightandcut, cutAndExtraCut;
  if(extracut.Length()==0) {
    weightandcut = Form("%s*weight*w_pu_rpv/eff_trig*(%s)", rpv::luminosity.Data(), cut.Data());
    cutAndExtraCut = Form("(%s)", cut.Data());
  }
  else {
    weightandcut = Form("%s*weight*w_pu_rpv/eff_trig*(%s&&%s)", rpv::luminosity.Data(), cut.Data(), extracut.Data());
    cutAndExtraCut = Form("(%s&&%s)", cut.Data(), extracut.Data());
  }

  // weighted histogram for convenient display
  TH1F *weightedHist = new TH1F(name, name, nBins, xMin, xMax);
  ch->Project(name, csvVar.Data(), weightandcut.Data());

  // raw number of MC counts
  TString rawHistName(Form("%s_raw", name.Data()));
  TH1F *rawHist = new TH1F(rawHistName, rawHistName, nBins, xMin, xMax);
  ch->Project(rawHistName, csvVar.Data(), cutAndExtraCut.Data());

  // weights need to be saved separately for proper application of the Barlow-Beeston method
  TString weightHistName(Form("%s_weights", name.Data()));
  TH1F *weight = new TH1F(weightHistName, weightHistName, nBins, xMin, xMax);
  weight->Add(weightedHist);
  for(int i=1; i<=weight->GetNbinsX(); i++) {
    // protect against empty raw-MC bins to avoid dividing by zero
    double raw = rawHist->GetBinContent(i);
    weight->SetBinContent(i, raw>0 ? weightedHist->GetBinContent(i)/raw : 0.);
    weight->SetBinError(i, raw>0 ? weightedHist->GetBinError(i)/raw : 0.);
  }

  file->cd();
  weightedHist->Write();
  weight->Write();
  rawHist->Write();
}