Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 10 additions & 7 deletions Common/Tasks/centralityStudypp.cxx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// Copyright 2019-2020 CERN and copyright holders of ALICE O2.

Check failure on line 1 in Common/Tasks/centralityStudypp.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[name/workflow-file]

Name of a workflow file must match the name of the main struct in it (without the PWG prefix). (Class implementation files should be in "Core" directories.)
// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders.
// All rights not expressly granted are reserved.
//
Expand Down Expand Up @@ -263,6 +263,7 @@

histPointers.insert({histPath + "hFDDA_Collisions", histos.add((histPath + "hFDDA_Collisions").c_str(), "hFDDA_Collisions", {kTH1D, {{axisMultUltraFineFDDA}}})});
histPointers.insert({histPath + "hFDDC_Collisions", histos.add((histPath + "hFDDC_Collisions").c_str(), "hFDDC_Collisions", {kTH1D, {{axisMultUltraFineFDDC}}})});
histPointers.insert({histPath + "hFDDM_Collisions", histos.add((histPath + "hFDDM_Collisions").c_str(), "hFDDM_Collisions", {kTH1D, {{axisMultUltraFineFDDC}}})});

histPointers.insert({histPath + "hFV0A_Collisions", histos.add((histPath + "hFV0A_Collisions").c_str(), "hFV0A_Collisions", {kTH1D, {{axisMultUltraFineFV0A}}})});
histPointers.insert({histPath + "hNGlobalTracks", histos.add((histPath + "hNGlobalTracks").c_str(), "hNGlobalTracks", {kTH1D, {{axisMultUltraFineGlobalTracks}}})});
Expand All @@ -276,6 +277,7 @@

histPointers.insert({histPath + "hFDDA_Collisions_Unequalized", histos.add((histPath + "hFDDA_Collisions_Unequalized").c_str(), "hFDDA_Collisions_Unequalized", {kTH1D, {{axisMultUltraFineFDDA}}})});
histPointers.insert({histPath + "hFDDC_Collisions_Unequalized", histos.add((histPath + "hFDDC_Collisions_Unequalized").c_str(), "hFDDC_Collisions_Unequalized", {kTH1D, {{axisMultUltraFineFDDC}}})});
histPointers.insert({histPath + "hFDDM_Collisions_Unequalized", histos.add((histPath + "hFDDM_Collisions_Unequalized").c_str(), "hFDDM_Collisions_Unequalized", {kTH1D, {{axisMultUltraFineFDDC}}})});

histPointers.insert({histPath + "hFV0A_Collisions_Unequalized", histos.add((histPath + "hFV0A_Collisions_Unequalized").c_str(), "hFV0A_Collisions_Unequalized", {kTH1D, {{axisMultUltraFineFV0A}}})});
histPointers.insert({histPath + "hNGlobalTracks_Unequalized", histos.add((histPath + "hNGlobalTracks_Unequalized").c_str(), "hNGlobalTracks_Unequalized", {kTH1D, {{axisMultUltraFineGlobalTracks}}})});
Expand Down Expand Up @@ -456,13 +458,13 @@
getHist(TH1, histPath + "hCollisionSelection")->Fill(11);

// if we got here, we also finally fill the FT0C histogram, please
histos.fill(HIST("hNPVContributors"), collision.multNTracksPV());
histos.fill(HIST("hFT0A_Collisions"), collision.multFT0A());
histos.fill(HIST("hFT0C_Collisions"), collision.multFT0C());
histos.fill(HIST("hFT0M_Collisions"), (collision.multFT0A() + collision.multFT0C()));
histos.fill(HIST("hFDDA_Collisions"), collision.multFDDA());
histos.fill(HIST("hFDDC_Collisions"), collision.multFDDC());
histos.fill(HIST("hFV0A_Collisions"), collision.multFV0A());
histos.fill(HIST("hNPVContributors"), multNTracksPV);
histos.fill(HIST("hFT0A_Collisions"), multFT0A);
histos.fill(HIST("hFT0C_Collisions"), multFT0C);
histos.fill(HIST("hFT0M_Collisions"), multFT0A + multFT0C);
histos.fill(HIST("hFDDA_Collisions"), multFDDA);
histos.fill(HIST("hFDDC_Collisions"), multFDDC);
histos.fill(HIST("hFV0A_Collisions"), multFV0A);
histos.fill(HIST("hNGlobalTracks"), collision.multNTracksGlobal());
histos.fill(HIST("hNMFTTracks"), collision.mftNtracks());

Expand All @@ -473,6 +475,7 @@
getHist(TH1, histPath + "hFT0M_Collisions")->Fill((multFT0A + multFT0C));
getHist(TH1, histPath + "hFDDA_Collisions")->Fill(multFDDA);
getHist(TH1, histPath + "hFDDC_Collisions")->Fill(multFDDC);
getHist(TH1, histPath + "hFDDM_Collisions")->Fill(multFDDA + multFDDC);
getHist(TH1, histPath + "hFV0A_Collisions")->Fill(multFV0A);
getHist(TH1, histPath + "hNGlobalTracks")->Fill(multNTracksGlobal);
getHist(TH1, histPath + "hNMFTTracks")->Fill(mftNtracks);
Expand Down
105 changes: 99 additions & 6 deletions Common/Tools/Multiplicity/multCalibrator.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
#include <RtypesCore.h>

#include <cmath>
#include <iostream> // FIXME

Check failure on line 32 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[include-iostream]

Do not include iostream. Use O2 logging instead.

using namespace std;

Expand All @@ -40,7 +40,7 @@
multCalibrator::multCalibrator() : TNamed(),
lDesiredBoundaries(0),
lNDesiredBoundaries(0),
fkPrecisionWarningThreshold(1.0),
fkPrecisionWarningThreshold(1000.0),
fInputFileName("AnalysisResults.root"),
fOutputFileName("CCDB-objects.root"),
fAnchorPointValue(-1),
Expand All @@ -57,7 +57,7 @@
multCalibrator::multCalibrator(const char* name, const char* title) : TNamed(name, title),
lDesiredBoundaries(0),
lNDesiredBoundaries(0),
fkPrecisionWarningThreshold(1.0),
fkPrecisionWarningThreshold(1000.0),
fInputFileName("AnalysisResults.root"),
fOutputFileName("CCDB-objects.root"),
fAnchorPointValue(-1),
Expand Down Expand Up @@ -93,14 +93,14 @@
// --- output: fOutputFileName, containing OABD object
//

cout << "=== STARTING CALIBRATION PROCEDURE ===" << endl;

Check failure on line 96 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).
cout << " * Input File.....: " << fInputFileName.Data() << endl;

Check failure on line 97 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).
cout << " * Output File....: " << fOutputFileName.Data() << endl;

Check failure on line 98 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).
cout << endl;

Check failure on line 99 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).

TFile* fileInput = new TFile(fInputFileName.Data(), "READ");
if (!fileInput) {
cout << "Input file not found!" << endl;

Check failure on line 103 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).
return kFALSE;
}

Expand All @@ -109,18 +109,18 @@
for (Int_t iv = 0; iv < kNCentEstim; iv++) {
hRaw[iv] = reinterpret_cast<TH1D*>(fileInput->Get(Form("multiplicity-qa/multiplicityQa/h%s", fCentEstimName[iv].Data())));
if (!hRaw[iv]) {
cout << Form("File does not contain histogram h%s, which is necessary for calibration!", fCentEstimName[iv].Data()) << endl;

Check failure on line 112 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).
return kFALSE;
}
}

cout << "Histograms loaded! Will now calibrate..." << endl;

Check failure on line 117 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).

// Create output file
TFile* fOut = new TFile(fOutputFileName.Data(), "RECREATE");
TH1F* hCalib[kNCentEstim];
for (Int_t iv = 0; iv < kNCentEstim; iv++) {
cout << Form("Calibrating estimator: %s", fCentEstimName[iv].Data()) << endl;

Check failure on line 123 in Common/Tools/Multiplicity/multCalibrator.cxx

View workflow job for this annotation

GitHub Actions / O2 linter

[logging]

Use O2 logging (LOG, LOGF, LOGP).
hCalib[iv] = GetCalibrationHistogram(hRaw[iv], Form("hCalib%s", fCentEstimName[iv].Data()));
hCalib[iv]->Write();
}
Expand Down Expand Up @@ -230,6 +230,54 @@
cout << "Set standard adaptive percentile boundaries! Nboundaries: " << lNDesiredBoundaries << endl;
}

//________________________________________________________________
void multCalibrator::SetRun3AdaptiveBoundaries()
{
// Function to set standard adaptive boundaries
// Run 3 exclusive: goes to 0.00001% binning for highest multiplicity
// Warning: this setting requires well filled input histograms
//
// -> at 0.00001%, one needs 10 million events to get a single one in the highest
// category. In Run 3, at 500 kHz, this corresponds to 20 seconds of dataking;
// any run lasting over 5 minutes should have enough to calibrate to the highest
// boundary.
//
// QA when using hyperfine binning is still advised:
// Selecting very fine percentages may select on spurious situations
// such as beam-background, pileup, etc.

lNDesiredBoundaries = 0;
lDesiredBoundaries = new Double_t[1100];
lDesiredBoundaries[0] = 100;
// From Low To High Multiplicity
for (Int_t ib = 1; ib < 91; ib++) {
lNDesiredBoundaries++;
lDesiredBoundaries[lNDesiredBoundaries] = lDesiredBoundaries[lNDesiredBoundaries - 1] - 1.0;
}
for (Int_t ib = 1; ib < 91; ib++) {
lNDesiredBoundaries++;
lDesiredBoundaries[lNDesiredBoundaries] = lDesiredBoundaries[lNDesiredBoundaries - 1] - 0.1;
}
for (Int_t ib = 1; ib < 91; ib++) {
lNDesiredBoundaries++;
lDesiredBoundaries[lNDesiredBoundaries] = lDesiredBoundaries[lNDesiredBoundaries - 1] - 0.01;
}
for (Int_t ib = 1; ib < 91; ib++) {
lNDesiredBoundaries++;
lDesiredBoundaries[lNDesiredBoundaries] = lDesiredBoundaries[lNDesiredBoundaries - 1] - 0.001;
}
for (Int_t ib = 1; ib < 91; ib++) {
lNDesiredBoundaries++;
lDesiredBoundaries[lNDesiredBoundaries] = lDesiredBoundaries[lNDesiredBoundaries - 1] - 0.0001;
}
for (Int_t ib = 1; ib < 101; ib++) {
lNDesiredBoundaries++;
lDesiredBoundaries[lNDesiredBoundaries] = lDesiredBoundaries[lNDesiredBoundaries - 1] - 0.00001;
}
lNDesiredBoundaries++;
cout << "Set run 3 adaptive percentile boundaries! Nboundaries: " << lNDesiredBoundaries << endl;
}

//________________________________________________________________
void multCalibrator::SetStandardOnePercentBoundaries()
{
Expand All @@ -244,6 +292,32 @@
cout << "Set standard 1%-wide percentile boundaries! Nboundaries: " << lNDesiredBoundaries << endl;
}

//________________________________________________________________
bool multCalibrator::IsBinningSane(TH1* histogram)
{
  // Check if the percentile binning that will be attempted is too fine for
  // this histogram. Returns false when:
  //  - the requested minimum bin width is absurdly small (< 1e-9 percent),
  //  - the histogram lacks the statistics to populate the finest bin
  //    (requires ~10 entries in the finest percentile bin, i.e.
  //    entries > 1000 / minimumBinWidth with widths in percent),
  //  - the histogram mean is suspiciously close to zero (likely empty/bogus input).
  double minimumBinWidth = 1000.0; // sentinel, larger than any percentile width
  for (int ib = 0; ib < lNDesiredBoundaries - 1; ib++) {
    const double lWidth = std::abs(lDesiredBoundaries[ib + 1] - lDesiredBoundaries[ib]);
    if (lWidth < minimumBinWidth) {
      minimumBinWidth = lWidth;
    }
  }
  if (minimumBinWidth < 1e-9) {
    cout << "Excessively fine binning requested: minimum bin width registers as " << minimumBinWidth << endl;
    return false; // not reasonable
  }
  if (histogram->GetEntries() < 1000.0 / minimumBinWidth) {
    cout << "Histogram " << histogram->GetName() << " does not have enough entries (" << histogram->GetEntries() << ") to calibrate!" << endl;
    return false;
  }
  if (std::abs(histogram->GetMean()) < 1e-9) {
    cout << "Histogram " << histogram->GetName() << " has suspicious mean: " << histogram->GetMean() << endl;
    return false;
  }
  // label matches the printed quantity: 100/minWidth = events spanning the
  // full percentile range per finest-bin entry (widths are in percent)
  cout << "Sanity check: " << histogram->GetName() << ", entries = " << histogram->GetEntries() << ", 100/(min bin width) = " << 100.0 / minimumBinWidth << " -> OK ! " << endl;
  return true;
}

//________________________________________________________________
TH1F* multCalibrator::GetCalibrationHistogram(TH1* histoRaw, TString lHistoName)
{
Expand All @@ -260,6 +334,19 @@
cout << "Last boundary: " << lDesiredBoundaries[0] << endl;
}

// binning check
if (!IsBinningSane(histoRaw)) {
cout << "Requested binning is not viable for input histogram named " << histoRaw->GetName() << "! Will return 100.5 for all requests." << endl;
Double_t lDummyBounds[2];
lDummyBounds[0] = 0;
lDummyBounds[1] = 1.0;
TH1F* hCalib = new TH1F(lHistoName.Data(), "", 1, lDummyBounds);
hCalib->SetBinContent(0, 100.5);
hCalib->SetBinContent(1, 100.5);
hCalib->SetBinContent(2, 100.5);
return hCalib;
}

// Aux vars
Double_t lMiddleOfBins[1000];
for (Long_t lB = 1; lB < lNDesiredBoundaries; lB++) {
Expand All @@ -278,15 +365,22 @@
if (fAnchorPointValue > 0)
lDisplacedii++;
lBounds[lDisplacedii] = GetBoundaryForPercentile(histoRaw, lDesiredBoundaries[ii], lPrecision[ii]);
bool warnUser = false;
TString lPrecisionString = "(Precision OK)";
if (ii != 0 && ii != lNDesiredBoundaries - 1) {
// check precision, please
if (lPrecision[ii] / TMath::Abs(lDesiredBoundaries[ii + 1] - lDesiredBoundaries[ii]) > fkPrecisionWarningThreshold)
if ((lPrecision[ii] / TMath::Abs(lDesiredBoundaries[ii + 1] - lDesiredBoundaries[ii])) > fkPrecisionWarningThreshold) {
lPrecisionString = "(WARNING: BINNING MAY LEAD TO IMPRECISION!)";
if (lPrecision[ii] / TMath::Abs(lDesiredBoundaries[ii - 1] - lDesiredBoundaries[ii]) > fkPrecisionWarningThreshold)
warnUser = true;
}
if ((lPrecision[ii] / TMath::Abs(lDesiredBoundaries[ii - 1] - lDesiredBoundaries[ii])) > fkPrecisionWarningThreshold) {
lPrecisionString = "(WARNING: BINNING MAY LEAD TO IMPRECISION!)";
warnUser = true;
}
}
if (warnUser) {
cout << histoRaw->GetName() << " boundaries, percentile: " << lDesiredBoundaries[ii] << "%\t Signal value = " << lBounds[lDisplacedii] << "\tprecision = " << lPrecision[ii] << "% " << lPrecisionString.Data() << endl;
}
cout << histoRaw->GetName() << " boundaries, percentile: " << lDesiredBoundaries[ii] << "%\t Signal value = " << lBounds[lDisplacedii] << "\tprecision = " << lPrecision[ii] << "% " << lPrecisionString.Data() << endl;
}
TH1F* hCalib = new TH1F(lHistoName.Data(), "", fAnchorPointValue < 0 ? lNDesiredBoundaries - 1 : lNDesiredBoundaries, lBounds);
hCalib->SetDirectory(0);
Expand All @@ -311,7 +405,6 @@
Double_t lInverseDesiredBoundaries[1100];
for (Int_t ii = 0; ii < lNDesiredBoundaries; ii++) {
lInverseDesiredBoundaries[ii] = lDesiredBoundaries[lNDesiredBoundaries - (ii + 1)];
cout << "Boundary " << ii << " is " << lInverseDesiredBoundaries[ii] << endl;
}
fPrecisionHistogram = new TH1D("hPrecisionHistogram", "", lNDesiredBoundaries - 1, lInverseDesiredBoundaries);
}
Expand Down
2 changes: 2 additions & 0 deletions Common/Tools/Multiplicity/multCalibrator.h
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,9 @@ class multCalibrator : public TNamed
void SetAnchorPointPercentage(Float_t lPer) { fAnchorPointPercentage = lPer; }

void SetStandardAdaptiveBoundaries(); // standard adaptive (pp-like)
void SetRun3AdaptiveBoundaries(); // Run 3 adaptive (down to 0.00001%)
void SetStandardOnePercentBoundaries(); // standard 1% (Pb-Pb like)
bool IsBinningSane(TH1* histogram); // for safety

// Master Function in this Class: To be called once filenames are set
Bool_t Calibrate();
Expand Down
Loading