doFlowSumAll.C
//
// $Id: doFlowSumAll.C,v 1.6 2011/07/25 15:54:50 posk Exp $
//
// Makes root.files.<cenNo> files containing lists of flow.hist.root files
// in the specified subdirectory/link of outDir (which is a link in this directory).
// Adds the histograms together over all files for all specified centralities.
// The cenNo is actually the baseRunNo plus the centrality.
// First adds all histograms, then, for some of them, does it again with weighted
// averages; for LYZ the error also includes the spread between the different
// batch jobs. Therefore, for LYZ do only one centrality at a time.
// The last entry of histFile[] is the output file.
// This macro must be in your working directory.
// If just AnalysisMaker was run, set nNames = 6.
//
// by Art Poskanzer and Kirill Filimonov
//
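// Example invocation (a sketch only; the centrality range, directory prefix, and
// output run number below are illustrative, not values taken from this macro):
//   root -b -q 'doFlowSumAll.C(11, 19, "", 99)'
// This assumes outDir is a link to the directory containing the <dirName>*-<cenNo>
// job subdirectories, each holding a flow.hist.root file.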

#include <iostream>
#include <fstream>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cmath>
#include "TObject.h"
#include "TH1.h"
#include "TH2.h"
#include "TFile.h"
#include "TKey.h"
#include "TChain.h"
#include "TList.h"
#include "TString.h"
using namespace std;

void doFlowSumAll(Int_t firstCenNo, Int_t lastCenNo, const char* dirName = "", Int_t outputRunNo = 99) {

  const int nSels = 2;
  const int nHars = 4; // 4
  bool LYZ = kFALSE;
  bool reCent = kFALSE;

  char rootFileName[80];
  char logFileName[80];
  char listFileName[80];
  char rootOutName[80];
  char logOutName[80];
  char logTitle[80];
  char lsCommand[80];
  char cpCommand[80];
  char logTitleCommand[80];
  char logCommand[80];
  char firstPassCommand[80];
  char secondPassCommand[80];
  char rmCommand[80];
  TFile* histFile[1000];
  Float_t j01 = 2.405;
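  // j01 = 2.405 is (approximately) the first zero of the Bessel function J0;
  // in the Lee-Yang Zeros method the integrated flow satisfies Vtheta = j01 / r0theta,
  // which is inverted below to recompute r0theta from the averaged Vtheta.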

  // names of histograms to be added with weighting (not including Obs hists)
  const char* baseName[] = {
    "Flow_Res_",
    "Flow_v2D_",              // must be before vEta and vPt
    "Flow_vEta_",
    "Flow_vPt_",
    "Flow_v_",
    "Flow_q_",
    "Flow_v2D_ScalarProd_",
    "Flow_vEta_ScalarProd_",
    "Flow_vPt_ScalarProd_",
    "Flow_v_ScalarProd_",
    "Flow_Cumul_vEta_Order2_",
    "Flow_Cumul_vPt_Order2_",
    "Flow_Cumul_v_Order2_",
    "Flow_Cumul_vEta_Order4_",
    "Flow_Cumul_vPt_Order4_",
    "Flow_Cumul_v_Order4_",
    "FlowLYZ_r0theta_",
    "FlowLYZ_Vtheta_",        // must come after r0theta
    "FlowLYZ_V_",
    "FlowLYZ_vr0_",
    "FlowLYZ_vEta_",
    "FlowLYZ_vPt_",
    "FlowLYZ_v_",
    "FlowLYZ_Gtheta0_",
    "FlowLYZ_Gtheta1_",
    "FlowLYZ_Gtheta2_",
    "FlowLYZ_Gtheta3_",
    "FlowLYZ_Gtheta4_"
  };
  //const int nNames = sizeof(baseName) / sizeof(char*);
  const int nNames = 6; // if just AnalysisMaker
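  // Only the first nNames entries of baseName are re-averaged with weighting below;
  // presumably the commented-out sizeof() line is meant to be restored when histograms
  // from the full analysis chain (not just AnalysisMaker) are being summed.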

  // open the output log file
  sprintf(logOutName, "ana%2d.log", outputRunNo);
  sprintf(logTitle, " Combination of centralities %2d to %2d", firstCenNo, lastCenNo);
  sprintf(logTitleCommand, "echo %s > %s", logTitle, logOutName);
  system(logTitleCommand);

  // open the input files
  Int_t nFile = 0;
  for (int nc = firstCenNo; nc <= lastCenNo; nc++) {
    sprintf(listFileName, "root.files.%2d", nc);
    //sprintf(lsCommand, "ls -1 outDir/*-%2d/flow.hist.root > %s", nc, listFileName);
    sprintf(lsCommand, "ls -1 outDir/%s*-%2d/flow.hist.root > %s", dirName, nc, listFileName);
    system(lsCommand);
    sprintf(cpCommand, "cat %s >> %s", listFileName, logOutName);
    system(cpCommand);
    fstream ascii_in;
    ascii_in.open(listFileName, ios::in);

    while (!ascii_in.eof()) {
      ascii_in >> rootFileName;
      if (strstr(rootFileName, ".root") == 0) continue;
      histFile[nFile] = new TFile(rootFileName);
      char* cp = strstr(rootFileName, "flow.");
      if (cp) *cp = '\0'; // truncate to directory name
      sprintf(firstPassCommand, "test -f %sflowPhiWgtNew.hist.root", rootFileName);
      sprintf(secondPassCommand, "test -f %sflowPhiWgt.hist.root", rootFileName);
      if (LYZ) sprintf(secondPassCommand, "test -f %sflow.firstPassLYZNew.root", rootFileName);
      if (reCent) {
        sprintf(firstPassCommand, "test -f %sflow.reCentAnaNew.root", rootFileName);
        sprintf(secondPassCommand, "test -f %sflow.reCentAna.root", rootFileName);
      }
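      // system() returns 0 when the 'test -f' succeeds, so this branch is taken
      // only when neither marker file exists, i.e. no second pass was run for
      // this directory.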
      if (system(firstPassCommand) && system(secondPassCommand)) {
        cout << "####################################" << endl;
        cout << "### No 2nd pass for " << rootFileName << endl;
        cout << "####################################" << endl;
        delete histFile[nFile];
        continue;
      }
      cout << nFile+1 << " directory name = " << rootFileName << endl;
      sprintf(logFileName, "%sana.log", rootFileName);
      sprintf(logCommand, "cat %s >> %s", logFileName, logOutName);
      system(logCommand);
      nFile++;
    }
  }
  const Int_t nFiles = nFile;
  cout << endl;
  cout << "input files: " << nFiles << endl;
  if (!nFiles) {
    cout << "#### no files" << endl;
    return;
  }

  TH2* yieldPartHist[nFiles];
  TH1* hist[nFiles+1];
  TH1* yieldPartHistPt[nFiles];
  TH1* yieldPartHistEta[nFiles];
  Float_t yieldTotal[nFiles];
  TH1* hist_r0theta[nSels][nHars];

  // open the output file
  sprintf(rootOutName, "ana%2d.root", outputRunNo);
  cout << " output file name = " << rootOutName << endl;
  histFile[nFiles] = new TFile(rootOutName, "RECREATE"); // last file is new out file
  cout << " log file name = " << logOutName << endl << endl;

  // add all histograms with no weighting
  // iterate over the histograms in file[0] and add all the others to it
  // write output to file[nFiles]
  TKey* key;
  TObject* obj;
  const char* objName = 0;
  TIter nextkey(histFile[0]->GetListOfKeys());
  while ((key = (TKey*)nextkey())) {
    histFile[0]->cd();
    //key->ls();
    obj = key->ReadObj();
    objName = key->GetName();
    if (obj->InheritsFrom("TH1")) { // TH1 or TProfile
      hist[0] = (TH1*)obj;
      cout << " hist name= " << objName << endl;
      for (int n = 1; n < nFiles; n++) {
        hist[1] = (TH1*)histFile[n]->Get(objName);
        if (hist[1]) hist[0]->Add(hist[1]);
      }
    }
    histFile[nFiles]->cd();
    obj->Write(objName);
    delete obj;
    obj = NULL;
  }

  // get yield histograms
  for (int n = 0; n < nFiles; n++) {
    yieldPartHist[n] = dynamic_cast<TH2*>(histFile[n]->Get("Flow_YieldPart2D"));
    yieldPartHistPt[n] = dynamic_cast<TH1*>(histFile[n]->Get("FlowLYZ_YieldPartPt"));
    yieldPartHistEta[n] = dynamic_cast<TH1*>(histFile[n]->Get("FlowLYZ_YieldPartEta"));
    if (yieldPartHistPt[n]) { yieldTotal[n] = yieldPartHistPt[n]->Integral(); }
  }

  cout << endl << "with weighting" << endl;
  TH1F* yieldY;
  TH1F* yieldPt;
  for (int pageNumber = 0; pageNumber < nNames; pageNumber++) {
  nextPage:
    bool twoD = kFALSE;
    if (strstr(baseName[pageNumber], "v2D")) twoD = kTRUE;

    for (int selN = 0; selN < nSels; selN++) {

      // no harmonics
      bool noHars = kFALSE;
      int nHar = nHars;
      if (strstr(baseName[pageNumber], "_v_") != 0 ||
          strcmp(baseName[pageNumber], "Flow_Cos_") == 0 ||
          strstr(baseName[pageNumber], "Res") != 0 ||
          strstr(baseName[pageNumber], "_V_") != 0 ||
          strstr(baseName[pageNumber], "_vr0_") != 0) {
        noHars = kTRUE;
        nHar = 1;
      }

      // two harmonics
      if (strstr(baseName[pageNumber], "theta_") != 0) {
        nHar = 2;
      }

      for (int harN = 0; harN < nHar; harN++) {

        // construct histName
        TString histName(baseName[pageNumber]);
        histName += "Sel";
        histName += selN+1;
        if (!noHars) {
          histName += "_Har";
          histName += harN+1;
        }

        if (strstr(histName.Data(), "_Gtheta") && harN > 1) { continue; }

        // get the histograms
        for (int n = 0; n < nFiles+1; n++) {
          hist[n] = dynamic_cast<TH1*>(histFile[n]->Get(histName));
          if (!hist[n]) {
            if (pageNumber < nNames - 1) {
              pageNumber++;
              goto nextPage;
            } else {
              cout << endl;
              cout << "*** Error: Correct nNames in macro" << endl;
            }
          } else if (n == 0) {
            cout << " hist name= " << histName << endl;
          }
        }

        // get the r0theta output hist
        if (strstr(histName.Data(), "r0theta")) {
          hist_r0theta[selN][harN] = dynamic_cast<TH1*>(histFile[nFiles]->Get(histName));
        }

        // get the number of bins for this hist
        if (!hist[0]) { continue; }
        int nBins; // set by 2D
        int xBins = hist[0]->GetNbinsX();
        int yBins = 0;
        if (twoD) {
          yBins = hist[0]->GetNbinsY();
          nBins = xBins + (xBins + 2) * yBins;
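          // In ROOT's global bin numbering for a 2D histogram, bin = x + (xBins+2)*y
          // (the +2 accounts for the underflow/overflow bins along x), so this nBins
          // is the global index of the last in-range (x,y) bin.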
          float yMax = hist[0]->GetXaxis()->GetXmax();
          float yMin = hist[0]->GetXaxis()->GetXmin();
          yieldY = new TH1F("Yield_Y", "Yield_Y", xBins, yMin, yMax);
          yieldY->SetXTitle("Rapidity");
          yieldY->SetYTitle("Counts");
          float ptMax = hist[0]->GetYaxis()->GetXmax();
          yieldPt = new TH1F("Yield_Pt", "Yield_Pt", yBins, 0., ptMax);
          yieldPt->SetXTitle("Pt (GeV/c)");
          yieldPt->SetYTitle("Counts");
        } else {
          nBins = xBins + 2;
        }

        // loop over the bins
        if (strstr(baseName[pageNumber], "LYZ")) {
          cout << " with LYZ yield weighted averaging" << endl;
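          // Yield-weighted average over the batch jobs:
          //   <v> = sum_n(yield_n * v_n) / sum_n(yield_n).
          // The error combines the propagated per-job errors,
          //   error2Sum = sum_n[(yield_n * err_n)^2] / (sum_n yield_n)^2,
          // with the spread of v between jobs, (<v^2> - <v>^2) / (nEff - 1), where
          // nEff = (sum_n yield_n)^2 / sum_n(yield_n^2) is the effective number of
          // jobs (1/yield2Sum after the normalizations below).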
          double v, yield, content, error, Vtheta, VthetaErr, r0, r0Err;
          double vSum, v2Sum, error2Sum, yieldSum, yieldSum2, yield2Sum;
          //for (int bin = 0; bin < nBins; bin++) {
          for (int bin = 1; bin < nBins-1; bin++) {
            v = 0.;
            vSum = 0.;
            v2Sum = 0.;
            content = 0.;
            error = 0.;
            error2Sum = 0.;
            yield = 0.;
            yieldSum = 0.;
            yield2Sum = 0.;
            for (int n = 0; n < nFiles; n++) {
              if (hist[n]) {
                if (strstr(histName.Data(), "vEta")) {
                  yield = yieldPartHistEta[n]->GetBinContent(bin);
                } else if (strstr(histName.Data(), "vPt")) {
                  yield = yieldPartHistPt[n]->GetBinContent(bin);
                } else { // r0theta, Vtheta, _V_, vr0, _v_, Gtheta
                  yield = yieldTotal[n];
                }
                v = hist[n]->GetBinContent(bin);
                if (v != 0 && yield > 1.) { // error is wrong for one count
                  yieldSum += yield;
                  yield2Sum += yield*yield;
                  vSum += yield * v;
                  v2Sum += yield * v*v;
                  error2Sum += pow(yield * hist[n]->GetBinError(bin), 2.);
                }
              }
            } // nFiles
            if (yieldSum) {
              content = vSum / yieldSum;
              v2Sum /= yieldSum;
              yieldSum2 = yieldSum*yieldSum;
              yield2Sum /= yieldSum2;
              error2Sum /= yieldSum2;
              if (strstr(baseName[pageNumber], "_G")) {
                error = 0.;
              } else if (nFiles > 1 && yield2Sum != 1.) {
                error = sqrt(error2Sum + (v2Sum - content*content) / (1/yield2Sum - 1));
              } else {
                error = sqrt(error2Sum); // only 1st term
              }
              if (strstr(histName.Data(), "v")) {
                //cout << histName << " 1st, 2nd: " << sqrt(error2Sum) << ", " << sqrt(error*error - error2Sum) << endl;
              }
              //error = sqrt(error2Sum); // only 1st term
            }
            hist[nFiles]->SetBinContent(bin, content);
            hist[nFiles]->SetBinError(bin, error);
            if (strstr(histName.Data(), "Vtheta")) {
              //cout << content << ", " << error2Sum << ", " << v2Sum << ", " << content*content << ", " << yield2Sum << ", " << error << endl;
              Vtheta = hist[nFiles]->GetBinContent(bin);
              VthetaErr = hist[nFiles]->GetBinError(bin);
              if (VthetaErr > 0.001) { // calculate r0theta from <Vtheta>
                //float perCentErr = Vtheta ? 100. * VthetaErr / Vtheta : 0.;
                //cout << "Vtheta= " << Vtheta << " +/- " << perCentErr << "%" << endl;
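                // Invert Vtheta = j01 / r0theta to propagate the averaged Vtheta
                // and its error back into the r0theta histogram of the output file.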
                r0 = Vtheta ? j01 / Vtheta : 0.; // Eq. 9
                r0Err = Vtheta ? r0 * VthetaErr / Vtheta : 0.;
                //cout << hist_r0theta[selN][harN]->GetBinContent(bin) << ", " << r0 << endl;
                hist_r0theta[selN][harN]->SetBinContent(bin, r0);
                hist_r0theta[selN][harN]->SetBinError(bin, r0Err);
                //cout << "r0= " << r0 << " +/- " << r0Err << endl;
              }
            }
          } // bins
        } else if ((strstr(baseName[pageNumber], "Cos")) ||
                   (strstr(baseName[pageNumber], "Res"))) { // with error weighting
          cout << " With error weighting" << endl;
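          // Inverse-variance (error) weighting:
          //   <x> = sum_n(x_n / err_n^2) / sum_n(1 / err_n^2),
          //   err(<x>) = 1 / sqrt(sum_n(1 / err_n^2)).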
          double content, error, errorSq, meanContent, meanError, weight;
          for (int bin = 0; bin < nBins; bin++) {
            meanContent = 0.;
            meanError = 0.;
            weight = 0.;
            for (int n = 0; n < nFiles; n++) {
              if (hist[n]) {
                content = hist[n]->GetBinContent(bin);
                error = hist[n]->GetBinError(bin);
                errorSq = error * error;
                if (errorSq > 0.) {
                  meanContent += content / errorSq;
                  weight += 1. / errorSq;
                }
              }
            } // nFiles
            if (weight > 0.) {
              meanContent /= weight;
              meanError = sqrt(1. / weight);
              hist[nFiles]->SetBinContent(bin, meanContent);
              hist[nFiles]->SetBinError(bin, meanError);
            }
          } // bins
        } else { // with yield weighting
          cout << " with yield weighted averaging" << endl;
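          // Yield-weighted average, with the yields taken from Flow_YieldPart2D
          // (projected onto eta or pt as needed):
          //   <v> = sum_n(yield_n * v_n) / sum_n(yield_n),
          //   err(<v>) = sqrt(sum_n[(yield_n * err_n)^2]) / sum_n(yield_n).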
          double v, yield, content, error, y, pt;
          double vSum, error2sum, yieldSum;
          for (int bin = 0; bin < nBins; bin++) {
            v = 0.;
            vSum = 0.;
            content = 0.;
            error = 0.;
            error2sum = 0.;
            yield = 0.;
            yieldSum = 0.;
            for (int n = 0; n < nFiles; n++) {
              if (hist[n]) {
                if (strstr(histName.Data(), "v2D")) {
                  yield = yieldPartHist[n]->GetBinContent(bin);
                } else if (strstr(histName.Data(), "vEta")) {
                  yield = yieldPartHist[n]->Integral(bin, bin, 1, yBins);
                  if (selN==0 && harN==0) {
                    y = yieldPartHist[n]->GetXaxis()->GetBinCenter(bin);
                    yieldY->Fill(y, yield);
                  }
                } else if (strstr(histName.Data(), "vPt")) {
                  yield = yieldPartHist[n]->Integral(1, xBins, bin, bin);
                  if (selN==0 && harN==0) {
                    pt = yieldPartHist[n]->GetYaxis()->GetBinCenter(bin);
                    yieldPt->Fill(pt, yield);
                  }
                } else { // _v_ and _q_
                  yield = yieldPartHist[n]->Integral();
                }
                v = hist[n]->GetBinContent(bin);
                if (v != 0. && yield > 1.) { // error is wrong for one count
                  yieldSum += yield;
                  vSum += yield * v;
                  error2sum += pow(yield * hist[n]->GetBinError(bin), 2.);
                }
              }
            } // nFiles
            if (yieldSum) {
              content = vSum / yieldSum;
              error = sqrt(error2sum) / yieldSum;
            }
            hist[nFiles]->SetBinContent(bin, content);
            hist[nFiles]->SetBinError(bin, error);
          } // bin
        } // else
      } // harN
    } // selN
    cout << endl;
  } // pageNumber

  cout << endl;
  //histFile[nFiles]->ls();
  histFile[nFiles]->Write(0, TObject::kOverwrite);
  histFile[nFiles]->Close();
  delete histFile[nFiles];
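  // Note: only the list file for the last centrality (the last name written into
  // listFileName) is removed here; the root.files.<cenNo> files for the earlier
  // centralities are left in place.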
  sprintf(rmCommand, "rm %s", listFileName);
  system(rmCommand);
}

//
// $Log: doFlowSumAll.C,v $
// Revision 1.6  2011/07/25 15:54:50  posk
// Added correction for non-flatness of event plane.
//
// Revision 1.5  2011/03/10 18:56:34  posk
// Added histogram for laboratory azimuthal distribution of particles.
//
// Revision 1.4  2010/09/30 19:28:21  posk
// Instead of reversing the weight for negative pseudorapidity for odd harmonics,
// it is now done only for the first harmonic.
// Recentering is now done for all harmonics.
//
// Revision 1.3  2007/02/06 19:00:50  posk
// In Lee Yang Zeros method, introduced recentering of Q vector.
// Reactivated eta symmetry cut.
//
// Revision 1.1  2006/03/22 21:59:51  posk
// Macro and shell script to sum the outputs of the second pass.
//
//