From 40f8c01c83e5056b26edbdbbef68fdbfb3ec99c1 Mon Sep 17 00:00:00 2001 From: Gabor Biro Date: Tue, 15 Feb 2022 20:50:44 +0100 Subject: [PATCH 1/8] Added first files of NDRegression --- Common/NDRegression/CMakeLists.txt | 35 ++++++++++++++++++ .../include/NDRegression/NDLocalRegression.h | 37 +++++++++++++++++++ Common/NDRegression/src/NDLocalRegression.cxx | 27 ++++++++++++++ .../NDRegression/test/NDLocalRegressionTest.C | 24 ++++++++++++ 4 files changed, 123 insertions(+) create mode 100644 Common/NDRegression/CMakeLists.txt create mode 100644 Common/NDRegression/include/NDRegression/NDLocalRegression.h create mode 100644 Common/NDRegression/src/NDLocalRegression.cxx create mode 100644 Common/NDRegression/test/NDLocalRegressionTest.C diff --git a/Common/NDRegression/CMakeLists.txt b/Common/NDRegression/CMakeLists.txt new file mode 100644 index 0000000000000..7864ba90471e1 --- /dev/null +++ b/Common/NDRegression/CMakeLists.txt @@ -0,0 +1,35 @@ +# Copyright 2019-2020 CERN and copyright holders of ALICE O2. +# See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +# All rights not expressly granted are reserved. +# +# This software is distributed under the terms of the GNU General Public +# License v3 (GPL Version 3), copied verbatim in the file "COPYING". +# +# In applying this license CERN does not waive the privileges and immunities +# granted to it by virtue of its status as an Intergovernmental Organization +# or submit itself to any jurisdiction. + +o2_add_library( + NDRegression + SOURCES src/NDLocalRegression.cxx + PUBLIC_LINK_LIBRARIES + # ROOT::Hist + # FairRoot::Base + # O2::CommonConstants + # O2::GPUCommon + # ROOT::GenVector + # ROOT::Geom + # Vc::Vc + ) + +o2_target_root_dictionary( + NDRegression + HEADERS include/NDRegression/NDLocalRegression.h + ) + +o2_add_test( + NDLocalRegressionTest + SOURCES test/NDLocalRegressionTest.cxx + COMPONENT_NAME NDRegression + PUBLIC_LINK_LIBRARIES O2::NDRegression + LABELS whatisthis) diff --git a/Common/NDRegression/include/NDRegression/NDLocalRegression.h b/Common/NDRegression/include/NDRegression/NDLocalRegression.h new file mode 100644 index 0000000000000..782b5f37d5160 --- /dev/null +++ b/Common/NDRegression/include/NDRegression/NDLocalRegression.h @@ -0,0 +1,37 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. 
+ +#ifndef ALICEO2_NDLOCALREGRESSION_H_ +#define ALICEO2_NDLOCALREGRESSION_H_ + +/// \file NDLocalRegression.h +/// \author Gabor Biro, biro.gabor@wigner.hu + +namespace o2 +{ +namespace nd_local_regression +{ + +class NDLocalRegression +{ + + public: + NDLocalRegression() = default; + // NDLocalRegression(); + ~NDLocalRegression() = default; + + bool init(); +}; + +} // namespace nd_local_regression +} // namespace o2 + +#endif diff --git a/Common/NDRegression/src/NDLocalRegression.cxx b/Common/NDRegression/src/NDLocalRegression.cxx new file mode 100644 index 0000000000000..78788c3ff7134 --- /dev/null +++ b/Common/NDRegression/src/NDLocalRegression.cxx @@ -0,0 +1,27 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. + +#include +#include "NDLocalRegression/NDLocalRegression.h" + +ClassImp(o2::NDRegression::NDLocalRegression); + +namespace o2 +{ +namespace nd_local_regression +{ +bool NDLocalRegression::init() +{ + std::cout << "Init done" << endl; + return true; +} +} // namespace nd_local_regression +} // namespace o2 diff --git a/Common/NDRegression/test/NDLocalRegressionTest.C b/Common/NDRegression/test/NDLocalRegressionTest.C new file mode 100644 index 0000000000000..6b366df036ec7 --- /dev/null +++ b/Common/NDRegression/test/NDLocalRegressionTest.C @@ -0,0 +1,24 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. 
+ +#define BOOST_TEST_MODULE Test NDLocalRegressionTest +#define BOOST_TEST_MAIN +#define BOOST_TEST_DYN_LINK +#include +#include "NDLocalRegression/NDLocalRegression.h" + +BOOST_AUTO_TEST_CASE(NDLocalRegressionTest_test) +{ + auto fitter = o2::nd_local_regression::NDLocalRegression(); + auto success = fitter.init(); + + BOOST_CHECK(success == true); +} From 9a1560c5fca7bea634b9ffc120ab2507ab8b27bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20B=C3=ADr=C3=B3?= Date: Fri, 18 Feb 2022 16:35:22 +0100 Subject: [PATCH 2/8] Working version of legacy methods --- Common/CMakeLists.txt | 1 + Common/NDRegression/CMakeLists.txt | 14 +- .../include/NDRegression/NDLocalRegression.h | 37 -- .../include/NDRegression/NDRegression.h | 96 ++++ Common/NDRegression/src/NDRegression.cxx | 467 ++++++++++++++++++ ...alRegression.cxx => NDRegressionLinkDef.h} | 20 +- .../NDRegression/test/NDLocalRegressionTest.C | 24 - .../NDRegression/test/NDRegression2DIdeal.cxx | 119 +++++ 8 files changed, 698 insertions(+), 80 deletions(-) delete mode 100644 Common/NDRegression/include/NDRegression/NDLocalRegression.h create mode 100644 Common/NDRegression/include/NDRegression/NDRegression.h create mode 100644 Common/NDRegression/src/NDRegression.cxx rename Common/NDRegression/src/{NDLocalRegression.cxx => NDRegressionLinkDef.h} (64%) delete mode 100644 Common/NDRegression/test/NDLocalRegressionTest.C create mode 100644 Common/NDRegression/test/NDRegression2DIdeal.cxx diff --git a/Common/CMakeLists.txt b/Common/CMakeLists.txt index bfb4a24ff3bc9..d4fa14d96a5fa 100644 --- a/Common/CMakeLists.txt +++ b/Common/CMakeLists.txt @@ -15,5 +15,6 @@ add_subdirectory(Field) add_subdirectory(Types) add_subdirectory(Utils) add_subdirectory(SimConfig) +add_subdirectory(NDRegression) o2_data_file(COPY maps DESTINATION Common) diff --git a/Common/NDRegression/CMakeLists.txt b/Common/NDRegression/CMakeLists.txt index 7864ba90471e1..b7c407772bc36 100644 --- a/Common/NDRegression/CMakeLists.txt +++ b/Common/NDRegression/CMakeLists.txt @@ -11,8 +11,12 @@ o2_add_library( NDRegression - SOURCES src/NDLocalRegression.cxx + SOURCES src/NDRegression.cxx PUBLIC_LINK_LIBRARIES + O2::MathUtils + O2::Headers + O2::CommonUtils + ROOT::Minuit # ROOT::Hist # FairRoot::Base # O2::CommonConstants @@ -24,12 +28,12 @@ o2_add_library( o2_target_root_dictionary( NDRegression - HEADERS include/NDRegression/NDLocalRegression.h + HEADERS include/NDRegression/NDRegression.h ) o2_add_test( - NDLocalRegressionTest - SOURCES test/NDLocalRegressionTest.cxx + NDRegression2DIdeal + SOURCES test/NDRegression2DIdeal.cxx COMPONENT_NAME NDRegression PUBLIC_LINK_LIBRARIES O2::NDRegression - LABELS whatisthis) + LABELS init tfile ttree histo makefit) diff --git a/Common/NDRegression/include/NDRegression/NDLocalRegression.h b/Common/NDRegression/include/NDRegression/NDLocalRegression.h deleted file mode 100644 index 782b5f37d5160..0000000000000 --- a/Common/NDRegression/include/NDRegression/NDLocalRegression.h +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019-2020 CERN and copyright holders of ALICE O2. -// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. -// All rights not expressly granted are reserved. -// -// This software is distributed under the terms of the GNU General Public -// License v3 (GPL Version 3), copied verbatim in the file "COPYING". 
-//
-// In applying this license CERN does not waive the privileges and immunities
-// granted to it by virtue of its status as an Intergovernmental Organization
-// or submit itself to any jurisdiction.
-
-#ifndef ALICEO2_NDLOCALREGRESSION_H_
-#define ALICEO2_NDLOCALREGRESSION_H_
-
-/// \file NDLocalRegression.h
-/// \author Gabor Biro, biro.gabor@wigner.hu
-
-namespace o2
-{
-namespace nd_local_regression
-{
-
-class NDLocalRegression
-{
-
- public:
-  NDLocalRegression() = default;
-  // NDLocalRegression();
-  ~NDLocalRegression() = default;
-
-  bool init();
-};
-
-} // namespace nd_local_regression
-} // namespace o2
-
-#endif
diff --git a/Common/NDRegression/include/NDRegression/NDRegression.h b/Common/NDRegression/include/NDRegression/NDRegression.h
new file mode 100644
index 0000000000000..6373062ca51b1
--- /dev/null
+++ b/Common/NDRegression/include/NDRegression/NDRegression.h
@@ -0,0 +1,96 @@
+// Copyright 2019-2020 CERN and copyright holders of ALICE O2.
+// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders.
+// All rights not expressly granted are reserved.
+//
+// This software is distributed under the terms of the GNU General Public
+// License v3 (GPL Version 3), copied verbatim in the file "COPYING".
+//
+// In applying this license CERN does not waive the privileges and immunities
+// granted to it by virtue of its status as an Intergovernmental Organization
+// or submit itself to any jurisdiction.
+
+/// \file NDRegression.h
+/// \author Gabor Biro, biro.gabor@wigner.hu
+
+#ifndef ALICEO2_NDREGRESSION_H
+#define ALICEO2_NDREGRESSION_H
+
+#include "TNamed.h"
+#include "TMath.h"
+#include "TFormula.h"
+#include "TObjString.h"
+#include "TString.h"
+#include "TLinearFitter.h"
+#include "TMinuit.h"
+#include "TVectorD.h"
+#include "THn.h"
+#include "Rtypes.h"
+#include <memory>
+#include "CommonUtils/TreeStream.h"
+#include "CommonUtils/TreeStreamRedirector.h"
+
+class TreeStreamRedirector;
+class THn;
+
+namespace o2
+{
+
+using std::make_shared;
+using std::make_unique;
+using std::shared_ptr;
+using std::unique_ptr;
+using utils::TreeStreamRedirector;
+
+namespace nd_regression
+{
+
+class NDRegression : public TNamed
+{
+ public:
+  NDRegression() = default;
+  NDRegression(const char* name, const char* title);
+  ~NDRegression() = default;
+  // NDRegression();
+
+  bool init();
+  void SetCuts(Double_t nSigma = 6, Double_t robustFraction = 0.95, Int_t estimator = 1);
+
+  void SetStreamer(shared_ptr<TreeStreamRedirector>& streamer) { fStreamer = streamer; }
+  void EvaluateUni(const Int_t nvectors, const Double_t* data, Double_t& mean, Double_t& sigma, const Int_t hSub);
+  Bool_t SetHistogram(THn* histo);
+
+  Bool_t MakeFit(TTree* tree, const char* formulaVal, const char* formulaVar, const char* selection, const char* formulaKernel, const char* dimensionFormula, Double_t weightCut = 1e-5, Int_t entries = 1e9, Bool_t useBinNorm = kTRUE);
+  Bool_t MakeRobustStatistic(TVectorD& values, TVectorD& errors, TObjArray& pointArray, TObjArray& kernelArrayI2, Int_t& nvarFormula, Double_t weightCut, Double_t robustFraction);
+
+  // Bool_t MakeFit(TTree * tree , const char *formulaVal, const char * formulaVar, const char*selection, const char * formulaKernel, const char * dimensionFormula, Double_t weightCut=0.00001, Int_t entries=1000000000, Bool_t useBinNorm=kTRUE);
+
+ protected:
+  shared_ptr<TreeStreamRedirector> fStreamer; //! streamer to keep - test intermediate data
+  THn* fHistPoints; // histogram of the local point distribution
+
+  TObjArray* fLocalFitParam;   // local fit parameters + RMS + chi2
+  TObjArray* fLocalFitQuality; // local fit npoints chi2
+  TObjArray* fLocalFitCovar;   // local fit covariance matrix
+
+  TMatrixD* fLocalRobustStat; // local robust statistic
+
+  Int_t* fBinIndex;     //[fNParameters] working array - current bin index
+  Double_t* fBinCenter; //[fNParameters] working current local variables - bin center
+  Double_t* fBinDelta;  //[fNParameters] working current local variables - bin delta
+  Double_t* fBinWidth;  //[fNParameters] working current local variables - bin width
+
+  Int_t fCutType; // type of the cut: 0 - no cut, 1 - cut around the local median, 2 - cut around the robust local mean
+
+  Double_t fRobustFractionLTS; // fraction of data used for the robust mean and robust rms estimator (LTS https://en.wikipedia.org/wiki/Least_trimmed_squares)
+  Double_t fRobustRMSLTSCut;   // cut on the robust RMS: |value-localmean| < fRobustRMSLTSCut * localRMS
+};
+
+} // namespace nd_regression
+} // namespace o2
+
+#endif
diff --git a/Common/NDRegression/src/NDRegression.cxx b/Common/NDRegression/src/NDRegression.cxx
new file mode 100644
--- /dev/null
+++ b/Common/NDRegression/src/NDRegression.cxx
@@ -0,0 +1,467 @@
+// Copyright 2019-2020 CERN and copyright holders of ALICE O2.
+// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders.
+// All rights not expressly granted are reserved.
+//
+// This software is distributed under the terms of the GNU General Public
+// License v3 (GPL Version 3), copied verbatim in the file "COPYING".
+//
+// In applying this license CERN does not waive the privileges and immunities
+// granted to it by virtue of its status as an Intergovernmental Organization
+// or submit itself to any jurisdiction.
+
+#include <iostream>
+#include "NDRegression/NDRegression.h"
+
+using namespace o2::nd_regression;
+
+ClassImp(o2::nd_regression::NDRegression);
+
+NDRegression::NDRegression(const char* name, const char* title) : TNamed(name, title) {}
+
+Bool_t NDRegression::init()
+{
+  fHistPoints = NULL;
+  fLocalRobustStat = NULL;
+
+  fRobustRMSLTSCut = 0.0;
+  fRobustFractionLTS = 0.0;
+  fCutType = 0;
+
+  std::cout << "Init done" << std::endl;
+  return true;
+}
+
+void NDRegression::SetCuts(Double_t nSigma, Double_t robustFraction,
+                           Int_t estimator)
+{
+  //
+  //
+  //
+  fRobustFractionLTS =
+    robustFraction;          // fraction of data used for the robust mean and robust
+                             // rms estimator (LTS
+                             // https://en.wikipedia.org/wiki/Least_trimmed_squares)
+  fRobustRMSLTSCut = nSigma; // cut on the robust RMS
+                             // |value-localmean| < fRobustRMSLTSCut * localRMS
+  fCutType = estimator;
+}
+
+Bool_t NDRegression::SetHistogram(THn* histo)
+{
+  fHistPoints = histo;
+  fLocalFitParam = new TObjArray(fHistPoints->GetNbins());
+  fLocalFitParam->SetOwner(kTRUE);
+  fLocalFitQuality = new TObjArray(fHistPoints->GetNbins());
+  fLocalFitQuality->SetOwner(kTRUE);
+  fLocalFitCovar = new TObjArray(fHistPoints->GetNbins());
+  fLocalFitCovar->SetOwner(kTRUE);
+
+  //
+  // Check histogram
+  //
+  Int_t ndim = histo->GetNdimensions();
+  for (Int_t idim = 0; idim < ndim; idim++) {
+    TAxis* axis = histo->GetAxis(idim);
+    if (axis->GetNbins() < 2) {
+      ::Error("NDRegression::SetHistogram", "Invalid binning nbins<2 %d", axis->GetNbins());
+      return false;
+    }
+    if (axis->GetXmin() >= axis->GetXmax()) {
+      ::Error("NDRegression::SetHistogram", "Invalid range <%f,%f", axis->GetXmin(), axis->GetXmax());
+      return false;
+    }
+  }
+
+  return true;
+}
+
+//_____________________________________________________________________________
+void NDRegression::EvaluateUni(const Int_t nvectors, const Double_t* data, Double_t& mean, Double_t& sigma, const Int_t hSub)
+{
+  //
+  // Robust estimator in the 1D case, MI version - (faster than the ROOT version)
+  //
+  // For the univariate case
+  // estimates of location and scatter are returned in the mean and sigma parameters
+  // the algorithm works on the same principle as in the multivariate case -
+  // it finds a subset of size hSub with the smallest sigma, and then returns the mean and
+  // sigma of this subset
+  //
+
+  Int_t hh = hSub;
+  if (nvectors < 2) {
+    ::Error("AliMathBase::EvaluateUni", "%s", Form("nvectors = %d, should be > 1", nvectors));
+    return;
+  }
+  if (hh == nvectors) {
+    mean = TMath::Mean(nvectors, data);
+    sigma = TMath::RMS(nvectors, data);
+    return;
+  }
+  if (hh < 2)
+    hh = (nvectors + 2) / 2;
+  Double_t faclts[] = {2.6477, 2.5092, 2.3826, 2.2662, 2.1587, 2.0589, 1.9660, 1.879, 1.7973, 1.7203, 1.6473};
+  Int_t* index = new
Int_t[nvectors]; + TMath::Sort(nvectors, data, index, kFALSE); + + Int_t nquant = TMath::Min(Int_t(Double_t(((hh * 1. / nvectors) - 0.5) * 40)) + 1, 11); + Double_t factor = faclts[TMath::Max(0, nquant - 1)]; + + Double_t sumx = 0; + Double_t sumx2 = 0; + Double_t bestmean = 0; + Double_t bestsigma = (data[index[nvectors - 1]] - data[index[0]] + 1.); // maximal possible sigma + bestsigma *= bestsigma; + + for (Int_t i = 0; i < hh; i++) { + sumx += data[index[i]]; + sumx2 += data[index[i]] * data[index[i]]; + } + + Double_t norm = 1. / Double_t(hh); + Double_t norm2 = 1. / Double_t(hh - 1); + for (Int_t i = hh; i < nvectors; i++) { + Double_t cmean = sumx * norm; + Double_t csigma = (sumx2 - hh * cmean * cmean) * norm2; + if (csigma < bestsigma) { + bestmean = cmean; + bestsigma = csigma; + } + + sumx += data[index[i]] - data[index[i - hh]]; + sumx2 += data[index[i]] * data[index[i]] - data[index[i - hh]] * data[index[i - hh]]; + } + + Double_t bstd = factor * TMath::Sqrt(TMath::Abs(bestsigma)); + mean = bestmean; + sigma = bstd; + delete[] index; +} + +Bool_t NDRegression::MakeRobustStatistic(TVectorD& values, TVectorD& errors, TObjArray& pointArray, TObjArray& kernelArrayI2, Int_t& nvarFormula, Double_t weightCut, Double_t robustFraction) +{ + // + // Calculate robust statistic information + // use raw array to make faster calcualtion + const Int_t kMaxDim = 100; + Double_t* pvalues = values.GetMatrixArray(); + Double_t* pvecVar[kMaxDim] = {0}; + Double_t* pvecKernelI2[kMaxDim] = {0}; + for (Int_t idim = 0; idim < pointArray.GetEntries(); idim++) { + pvecVar[idim] = ((TVectorD*)(pointArray.At(idim)))->GetMatrixArray(); + pvecKernelI2[idim] = ((TVectorD*)(kernelArrayI2.At(idim)))->GetMatrixArray(); + } + + Double_t nchi2Cut = -2 * TMath::Log(weightCut); // transform probability to nsigma cut + if (robustFraction > 1) + robustFraction = 1; + + Int_t nbins = fHistPoints->GetNbins(); // + Int_t npoints = values.GetNrows(); // number of points for fit + if (fLocalRobustStat) { + delete fLocalRobustStat; + } + fLocalRobustStat = new TMatrixD(nbins, 3); + + TVectorD valueLocal(npoints); + for (Int_t ibin = 0; ibin < nbins; ibin++) { + fHistPoints->GetBinContent(ibin, fBinIndex); // + for (Int_t idim = 0; idim < nvarFormula; idim++) { + fBinCenter[idim] = fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]); + } + Int_t indexLocal = 0; + for (Int_t ipoint = 0; ipoint < npoints; ipoint++) { + Double_t sumChi2 = 0; + for (Int_t idim = 0; idim < nvarFormula; idim++) { + //TVectorD &vecVar=*((TVectorD*)(pointArray.UncheckedAt(idim))); + //TVectorD &vecKernel=*((TVectorD*)(kernelArray.UncheckedAt(idim))); + fBinDelta[idim] = pvecVar[idim][ipoint] - fBinCenter[idim]; + sumChi2 += (fBinDelta[idim] * fBinDelta[idim]) * pvecKernelI2[idim][ipoint]; + if (sumChi2 > nchi2Cut) + break; //continue; + } + if (sumChi2 > nchi2Cut) + continue; + valueLocal[indexLocal] = pvalues[ipoint]; + indexLocal++; + } + Double_t median = 0, meanX = 0, rmsX = 0; + if (indexLocal * robustFraction - 1 > 3) { + median = TMath::Median(indexLocal, valueLocal.GetMatrixArray()); + EvaluateUni(indexLocal, valueLocal.GetMatrixArray(), meanX, rmsX, indexLocal * robustFraction - 1); + } + (*fLocalRobustStat)(ibin, 0) = median; + (*fLocalRobustStat)(ibin, 1) = meanX; + (*fLocalRobustStat)(ibin, 2) = rmsX; + } + return true; +} + +Bool_t NDRegression::MakeFit(TTree* tree, const char* formulaVal, const char* formulaVar, const char* selection, const char* 
formulaKernel, const char* dimensionFormula, Double_t weightCut, Int_t entries, Bool_t useBinNorm)
+{
+  //
+  // Make a local fit in a grid as specified by the input THn histogram
+  // The histogram has to be set before invoking this method
+  //
+  // Output:
+  //   array of fit parameters and covariance matrices
+  //
+  // Input Parameters:
+  //   tree             - input tree
+  //   formulaVal       - ":" separated variable:error string
+  //   formulaVar       - ":" separated variable list
+  //   selection        - selection (cut) for TTree::Draw
+  //   formulaKernel    - ":" separated list of kernel widths for the local fitting
+  //   dimensionFormula - dummy for the moment
+  //
+  // Algorithm:
+  //   1.) Check consistency of input data
+  //
+  //   2.) Cache input data from the tree to an array of TVectorD vectors
+  //
+  //   3.) Calculate robust local mean and robust local RMS in case an outlier removal algorithm is specified
+  //
+  //   4.) Make local fit
+  //
+  // const Double_t kEpsilon=1e-6;
+  const Int_t kMaxDim = 100;
+  Int_t binRange[kMaxDim] = {0};
+  Double_t nchi2Cut = -2 * TMath::Log(weightCut); // transform probability to nsigma cut
+  if (fHistPoints == NULL) {
+    ::Error("NDRegression::MakeFit", "ND histogram not initialized\n");
+    return kFALSE;
+  }
+  if (tree == NULL || tree->GetEntries() == 0) {
+    ::Error("NDRegression::MakeFit", "Empty tree\n");
+    return kFALSE;
+  }
+  if (formulaVar == NULL || formulaVar == 0) {
+    ::Error("NDRegression::MakeFit", "Empty variable list\n");
+    return kFALSE;
+  }
+  if (formulaKernel == NULL) {
+    ::Error("NDRegression::MakeFit", "Kernel width not specified\n");
+    return kFALSE;
+  }
+  // fUseBinNorm = useBinNorm;
+  //
+  // fInputTree = tree; // should be better TRef?
+  // fFormulaVal = new TObjString(formulaVal);
+  // fFormulaVar = new TObjString(formulaVar);
+  // fSelection = new TObjString(selection);
+  // fKernelWidthFormula = new TObjString(formulaKernel);
+  // fPolDimensionFormula = new TObjString(dimensionFormula);
+  auto arrayFormulaVar = TString(formulaVar).Tokenize(":");
+  Int_t nvarFormula = arrayFormulaVar->GetEntries();
+  if (nvarFormula != fHistPoints->GetNdimensions()) {
+    ::Error("NDRegression::MakeFit", "Histogram/points mismatch\n");
+    return kFALSE;
+  }
+
+  // for (int i = 0; i < nvarFormula; i++) {
+  //   std::cout << ((TString*)(arrayFormulaVar->At(i)))->View() << "\n";
+  // }
+
+  TObjArray* arrayKernel = TString(formulaKernel).Tokenize(":");
+  Int_t nwidthFormula = arrayKernel->GetEntries();
+  if (nvarFormula != nwidthFormula) {
+    delete arrayKernel;
+    delete arrayFormulaVar;
+    ::Error("NDRegression::MakeFit", "Variable/Kernel mismatch\n");
+    return kFALSE;
+  }
+  // fNParameters = nvarFormula;
+
+  //
+  // 2.)
Load input data + // + // + Int_t entriesVal = tree->Draw(formulaVal, selection, "goffpara", entries); + if (entriesVal == 0) { + ::Error("NDRegression::MakeFit", "Empty point list\t%s\t%s\n", formulaVal, selection); + return kFALSE; + } + if (tree->GetVal(0) == NULL || (tree->GetVal(1) == NULL)) { + ::Error("NDRegression::MakeFit", "Wrong selection\t%s\t%s\n", formulaVar, selection); + return kFALSE; + } + TVectorD values(entriesVal, tree->GetVal(0)); + TVectorD errors(entriesVal, tree->GetVal(1)); + Double_t* pvalues = values.GetMatrixArray(); + Double_t* perrors = errors.GetMatrixArray(); + + // std::cout << std::endl; + // values.Print(); + // std::cout << std::endl; + + // 2.b) variables + TObjArray pointArray(nvarFormula); + Int_t entriesVar = tree->Draw(formulaVar, selection, "goffpara", entries); + if (entriesVal != entriesVar) { + ::Error("NDRegression::MakeFit", "Wrong selection\t%s\t%s\n", formulaVar, selection); + return kFALSE; + } + for (Int_t ipar = 0; ipar < nvarFormula; ipar++) + pointArray.AddAt(new TVectorD(entriesVar, tree->GetVal(ipar)), ipar); + + // 2.c) kernel array + TObjArray kernelArrayI2(nvarFormula); + tree->Draw(formulaKernel, selection, "goffpara", entries); + for (Int_t ipar = 0; ipar < nvarFormula; ipar++) { + TVectorD* vdI2 = new TVectorD(entriesVar, tree->GetVal(ipar)); + for (int k = entriesVar; k--;) { // to speed up, precalculate inverse squared + double kv = (*vdI2)[k]; + if (TMath::Abs(kv) < 1e-12) + ::Fatal("NDRegression::MakeFit", "Kernel width=%f for entry %d of par:%d", kv, k, ipar); + (*vdI2)[k] = 1. / (kv * kv); + } + kernelArrayI2.AddAt(vdI2, ipar); + } + // + Double_t* pvecVar[kMaxDim] = {0}; + Double_t* pvecKernelI2[kMaxDim] = {0}; + for (Int_t idim = 0; idim < pointArray.GetEntries(); idim++) { + pvecVar[idim] = ((TVectorD*)(pointArray.At(idim)))->GetMatrixArray(); + pvecKernelI2[idim] = ((TVectorD*)(kernelArrayI2.At(idim)))->GetMatrixArray(); + binRange[idim] = fHistPoints->GetAxis(idim)->GetNbins(); + } + // + // + // + Int_t nbins = fHistPoints->GetNbins(); + fBinIndex = new Int_t[fHistPoints->GetNdimensions()]; + fBinCenter = new Double_t[fHistPoints->GetNdimensions()]; + fBinDelta = new Double_t[fHistPoints->GetNdimensions()]; + fBinWidth = new Double_t[fHistPoints->GetNdimensions()]; + + // + // 3.) + // + if (fCutType > 0 && fRobustRMSLTSCut > 0) { + MakeRobustStatistic(values, errors, pointArray, kernelArrayI2, nvarFormula, weightCut, fRobustFractionLTS); + } + // + + // 4.) 
Make local fits + // + + Double_t* binHypFit = new Double_t[2 * fHistPoints->GetNdimensions()]; + // + TLinearFitter fitter(1 + 2 * nvarFormula, TString::Format("hyp%d", 2 * nvarFormula).Data()); + for (Int_t ibin = 0; ibin < nbins; ibin++) { + fHistPoints->GetBinContent(ibin, fBinIndex); + Bool_t isUnderFlowBin = kFALSE; + Bool_t isOverFlowBin = kFALSE; + for (Int_t idim = 0; idim < nvarFormula; idim++) { + if (fBinIndex[idim] == 0) + isUnderFlowBin = kTRUE; + if (fBinIndex[idim] > binRange[idim]) + isOverFlowBin = kTRUE; + fBinCenter[idim] = fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]); + } + if (isUnderFlowBin || isOverFlowBin) + continue; + fitter.ClearPoints(); + // add fit points + for (Int_t ipoint = 0; ipoint < entriesVal; ipoint++) { + Double_t sumChi2 = 0; + if (fCutType > 0 && fRobustRMSLTSCut > 0) { + Double_t localRMS = (*fLocalRobustStat)(ibin, 2); + Double_t localMean = (*fLocalRobustStat)(ibin, 1); + Double_t localMedian = (*fLocalRobustStat)(ibin, 0); + if (fCutType == 1) { + if (TMath::Abs(pvalues[ipoint] - localMedian) > fRobustRMSLTSCut * localRMS) + continue; + } + if (fCutType == 2) { + if (TMath::Abs(pvalues[ipoint] - localMean) > fRobustRMSLTSCut * localRMS) + continue; + } + } + for (Int_t idim = 0; idim < nvarFormula; idim++) { + //TVectorD &vecVar=*((TVectorD*)(pointArray.UncheckedAt(idim))); + //TVectorD &vecKernel=*((TVectorD*)(kernelArray.UncheckedAt(idim))); + fBinDelta[idim] = pvecVar[idim][ipoint] - fBinCenter[idim]; + sumChi2 += (fBinDelta[idim] * fBinDelta[idim]) * pvecKernelI2[idim][ipoint]; + if (sumChi2 > nchi2Cut) + break; //continue; + if (useBinNorm) { + binHypFit[2 * idim] = fBinDelta[idim] / fBinWidth[idim]; + binHypFit[2 * idim + 1] = binHypFit[2 * idim] * binHypFit[2 * idim]; + } else { + binHypFit[2 * idim] = fBinDelta[idim]; + binHypFit[2 * idim + 1] = fBinDelta[idim] * fBinDelta[idim]; + } + } + if (sumChi2 > nchi2Cut) + continue; + // Double_t weight=TMath::Exp(-sumChi2*0.5); + // fitter.AddPoint(binHypFit,pvalues[ipoint], perrors[ipoint]/weight); + Double_t weightI = TMath::Exp(sumChi2 * 0.5); + fitter.AddPoint(binHypFit, pvalues[ipoint], perrors[ipoint] * weightI); + } + TVectorD* fitParam = new TVectorD(nvarFormula * 2 + 1); + TVectorD* fitQuality = new TVectorD(3); + TMatrixD* fitCovar = new TMatrixD(nvarFormula * 2 + 1, nvarFormula * 2 + 1); + Double_t normRMS = 0; + Int_t nBinPoints = fitter.GetNpoints(); + Bool_t fitOK = kFALSE; + (*fitQuality)[0] = 0; + (*fitQuality)[1] = 0; + (*fitQuality)[2] = 0; + + if (fitter.GetNpoints() > nvarFormula * 2 + 2) { + + if (fCutType == 3) + fitOK = (fitter.EvalRobust() == 0); + else + fitOK = (fitter.Eval() == 0); + + if (fitOK) { + normRMS = fitter.GetChisquare() / (fitter.GetNpoints() - fitter.GetNumberFreeParameters()); + fitter.GetParameters(*fitParam); + fitter.GetCovarianceMatrix(*fitCovar); + (*fitQuality)[0] = nBinPoints; + (*fitQuality)[1] = normRMS; + (*fitQuality)[2] = ibin; + fLocalFitParam->AddAt(fitParam, ibin); + fLocalFitQuality->AddAt(fitQuality, ibin); + fLocalFitCovar->AddAt(fitCovar, ibin); + } + } + if (fStreamer) { + TVectorD pfBinCenter(nvarFormula, fBinCenter); + Double_t median = 0, mean = 0, rms = 0; + if (fLocalRobustStat) { + median = (*fLocalRobustStat)(ibin, 0); + mean = (*fLocalRobustStat)(ibin, 1); + rms = (*fLocalRobustStat)(ibin, 2); + } + (*fStreamer) << "localFit" + << "ibin=" << ibin << // bin index + "fitOK=" << fitOK << "localMedian=" << median << "localMean=" << mean << 
"localRMS=" << rms << "nBinPoints=" << nBinPoints << // center of the bin + "binCenter.=" << &pfBinCenter << // + "normRMS=" << normRMS << "fitParam.=" << fitParam << "fitCovar.=" << fitCovar << "fitOK=" << fitOK << "\n"; + } + if (!fitOK) { // avoid memory leak for failed fits + delete fitParam; + delete fitQuality; + delete fitCovar; + } + } + + return kTRUE; +} diff --git a/Common/NDRegression/src/NDLocalRegression.cxx b/Common/NDRegression/src/NDRegressionLinkDef.h similarity index 64% rename from Common/NDRegression/src/NDLocalRegression.cxx rename to Common/NDRegression/src/NDRegressionLinkDef.h index 78788c3ff7134..ee149996aca0e 100644 --- a/Common/NDRegression/src/NDLocalRegression.cxx +++ b/Common/NDRegression/src/NDRegressionLinkDef.h @@ -9,19 +9,11 @@ // granted to it by virtue of its status as an Intergovernmental Organization // or submit itself to any jurisdiction. -#include -#include "NDLocalRegression/NDLocalRegression.h" +#ifdef __CLING__ +#pragma link off all globals; +#pragma link off all classes; +#pragma link off all functions; -ClassImp(o2::NDRegression::NDLocalRegression); +#pragma link C++ class o2::nd_regression::NDRegression + ; -namespace o2 -{ -namespace nd_local_regression -{ -bool NDLocalRegression::init() -{ - std::cout << "Init done" << endl; - return true; -} -} // namespace nd_local_regression -} // namespace o2 +#endif diff --git a/Common/NDRegression/test/NDLocalRegressionTest.C b/Common/NDRegression/test/NDLocalRegressionTest.C deleted file mode 100644 index 6b366df036ec7..0000000000000 --- a/Common/NDRegression/test/NDLocalRegressionTest.C +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019-2020 CERN and copyright holders of ALICE O2. -// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. -// All rights not expressly granted are reserved. -// -// This software is distributed under the terms of the GNU General Public -// License v3 (GPL Version 3), copied verbatim in the file "COPYING". -// -// In applying this license CERN does not waive the privileges and immunities -// granted to it by virtue of its status as an Intergovernmental Organization -// or submit itself to any jurisdiction. - -#define BOOST_TEST_MODULE Test NDLocalRegressionTest -#define BOOST_TEST_MAIN -#define BOOST_TEST_DYN_LINK -#include -#include "NDLocalRegression/NDLocalRegression.h" - -BOOST_AUTO_TEST_CASE(NDLocalRegressionTest_test) -{ - auto fitter = o2::nd_local_regression::NDLocalRegression(); - auto success = fitter.init(); - - BOOST_CHECK(success == true); -} diff --git a/Common/NDRegression/test/NDRegression2DIdeal.cxx b/Common/NDRegression/test/NDRegression2DIdeal.cxx new file mode 100644 index 0000000000000..691c7927acb14 --- /dev/null +++ b/Common/NDRegression/test/NDRegression2DIdeal.cxx @@ -0,0 +1,119 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. 
+
+#define BOOST_TEST_MODULE Test NDRegression2DIdeal
+#define BOOST_TEST_MAIN
+#define BOOST_TEST_DYN_LINK
+
+#include <boost/test/unit_test.hpp>
+#include "NDRegression/NDRegression.h"
+#include "TRandom.h"
+#include "TFile.h"
+#include "THn.h"
+
+// namespace o2::utils
+// {
+// class TreeStreamRedirector;
+// }
+
+// using o2::utils::TreeStreamRedirector;
+using std::make_shared;
+using std::make_unique;
+
+BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test)
+{
+  auto pfitNDIdeal = make_unique<o2::nd_regression::NDRegression>("pfitNDIdeal", "pfitNDIdeal");
+  auto success = pfitNDIdeal->init();
+
+  BOOST_CHECK(success == true);
+
+  //
+  // 0.) Initialization of variables and THn
+  //
+  Int_t ndim = 2;
+  Double_t err = 0.1;
+
+  o2::utils::TreeStreamRedirector pcstreamIn("fitNDLocalTestInput.root", "recreate");
+  auto pcstreamOutIdeal = make_shared<o2::utils::TreeStreamRedirector>("fitNDLocalTestOutputIdeal.root", "recreate");
+
+  Double_t* xyz = new Double_t[ndim];
+  Double_t* sxyz = new Double_t[ndim];
+  Int_t* nbins = new Int_t[ndim];
+  Double_t* xmin = new Double_t[ndim];
+  Double_t* xmax = new Double_t[ndim];
+  TString** chxyz = new TString*[ndim];
+  for (Int_t idim = 0; idim < ndim; idim++) {
+    chxyz[idim] = new TString(TString::Format("xyz%d=", idim).Data());
+    nbins[idim] = 40;
+    xmin[idim] = 0;
+    xmax[idim] = 1;
+  }
+
+  auto pformula = make_shared<TFormula>("pformula", "cos(7*x[0]/pi)*sin(11*x[1]/pi)");
+  auto hN = make_shared<THnF>("exampleFit", "exampleFit", ndim, nbins, xmin, xmax);
+  //
+  // 1.) generate random input points
+  //
+  for (Int_t ipoint = 0; ipoint < 1e4; ipoint++) {
+    for (Int_t idim = 0; idim < ndim; idim++) {
+      xyz[idim] = gRandom->Rndm();
+    }
+    Double_t value = pformula->EvalPar(xyz, 0);
+    Double_t noise = gRandom->Gaus() * err;
+    Double_t noise2 = noise * (1 + (gRandom->Rndm() < 0.1) * 100); // noise with 10 percent of outliers
+    Double_t noiseBreit = gRandom->BreitWigner() * err;
+    pcstreamIn << "testInput"
+               << "val=" << value << "err=" << err << "noise=" << noise << // Gaussian noise
+      "noise2=" << noise2 <<                                               // Gaussian noise + 10% of outliers
+      "noiseBreit=" << noiseBreit;
+    for (Int_t idim = 0; idim < ndim; idim++) {
+      pcstreamIn << "testInput" << chxyz[idim]->Data() << xyz[idim];
+    }
+    pcstreamIn << "testInput"
+               << "\n";
+  }
+  pcstreamIn.Close();
+  std::cout << "testfile done";
+
+  TFile inpf("fitNDLocalTestInput.root");
+  BOOST_CHECK(!inpf.IsZombie());
+  auto treeIn = (TTree*)(inpf.GetFile()->Get("testInput"));
+  BOOST_CHECK(treeIn);
+
+  pfitNDIdeal->SetStreamer(pcstreamOutIdeal);
+
+  success = pfitNDIdeal->SetHistogram((THn*)((hN->Clone())));
+  BOOST_CHECK(success);
+
+  success = pfitNDIdeal->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001);
+  BOOST_CHECK(success);
+
+  treeIn->Draw("(AliNDLocalRegression::GetCorrND(5,xyz0,xyz1)-AliNDLocalRegression::GetCorrND(4,xyz0,xyz1))/sqrt(AliNDLocalRegression::GetCorrNDError(4,xyz0,xyz1)**2+AliNDLocalRegression::GetCorrNDError(5,xyz0,xyz1)**2)>>pullsBreiWigner54(401,-20.5,20.5)", "", "");
+  TH1F* pullsBreiWigner54 = (TH1F*)gPad->GetPrimitive("pullsBreiWigner54");
+  Double_t meanPullsBreitWigner = treeIn->GetHistogram()->GetMean();
+  Double_t meanPullsBreitWignerErr = treeIn->GetHistogram()->GetMeanError();
+  Double_t rmsPullsBreitWigner = treeIn->GetHistogram()->GetRMS();
+  Double_t rmsPullsBreitWignerErr = treeIn->GetHistogram()->GetRMSError();
+  if (TMath::Abs(meanPullsBreitWigner) < 10 * meanPullsBreitWignerErr) {
+    ::Info("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", "mean pull OK %3.3f\t+-%3.3f", meanPullsBreitWigner, meanPullsBreitWignerErr);
+  } else {
::Error("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", "mean pull NOT OK %3.3f\t+-%3.3f", meanPullsBreitWigner, meanPullsBreitWignerErr); + } + if (rmsPullsBreitWigner < 1 + rmsPullsBreitWignerErr) { + ::Info("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", " rms pull OK %3.3f\t+-%3.3f", rmsPullsBreitWigner, rmsPullsBreitWignerErr); + } else { + ::Error("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", " rms pull NOT OK %3.3f\t+-%3.3f", rmsPullsBreitWigner, rmsPullsBreitWignerErr); + } + PlotData(pullsBreiWigner54, "BreitWigner pulls", "counts (arb. units)", kBlue + 2, "zTitle", rmsPullsBreitWigner, rmsPullsBreitWignerErr, meanPullsBreitWigner, meanPullsBreitWignerErr); + canvasUnitBreitWigner->SaveAs("AliNDLocalRegressionTest.canvasUnitBreitWigner.png"); + + inpf.Close(); +}
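For reference, the end-to-end call sequence exercised by the new NDRegression2DIdeal test reduces to the following minimal usage sketch. It is not part of the patch: the function name runLocalFit, the debug output file name and the tree/branch names (val, err, xyz0, xyz1) are illustrative placeholders carried over from the test above, and the grid binning and kernel widths are the same example values the test uses.

#include <memory>
#include "THn.h"
#include "TTree.h"
#include "NDRegression/NDRegression.h"
#include "CommonUtils/TreeStreamRedirector.h"

// Minimal sketch: define the local-fit grid, optionally attach a debug streamer,
// then run the kernel-weighted local fits over a tree holding value, error and
// coordinate branches ("val:err" and "xyz0:xyz1", as in the test above).
void runLocalFit(TTree* treeIn)
{
  Int_t nbins[2] = {40, 40};
  Double_t xmin[2] = {0., 0.};
  Double_t xmax[2] = {1., 1.};
  THnF grid("grid", "local fit grid", 2, nbins, xmin, xmax);

  auto fitter = std::make_unique<o2::nd_regression::NDRegression>("fit", "fit");
  fitter->init(); // reset the internal state before configuring
  auto streamer = std::make_shared<o2::utils::TreeStreamRedirector>("fitDebug.root", "recreate");
  fitter->SetStreamer(streamer);            // optional: per-bin fit information dump
  fitter->SetHistogram((THn*)grid.Clone()); // grid of local-fit centers
  // value:error, fit variables, selection, kernel widths, dimension dummy, weight cut
  fitter->MakeFit(treeIn, "val:err", "xyz0:xyz1", "", "0.02:0.02", "2:2", 1e-4);
}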
"localRMS=" << rms << "nBinPoints=" << nBinPoints << // center of the bin + "binCenter.=" << &pfBinCenter << // + "normRMS=" << normRMS << "fitParam.=" << fitParam << "fitCovar.=" << fitCovar << "fitOK=" << fitOK << "\n"; + } + if (!fitOK) { // avoid memory leak for failed fits + delete fitParam; + delete fitQuality; + delete fitCovar; + } + } + + return kTRUE; +} diff --git a/Common/NDRegression/src/NDLocalRegression.cxx b/Common/NDRegression/src/NDRegressionLinkDef.h similarity index 64% rename from Common/NDRegression/src/NDLocalRegression.cxx rename to Common/NDRegression/src/NDRegressionLinkDef.h index 78788c3ff7134..ee149996aca0e 100644 --- a/Common/NDRegression/src/NDLocalRegression.cxx +++ b/Common/NDRegression/src/NDRegressionLinkDef.h @@ -9,19 +9,11 @@ // granted to it by virtue of its status as an Intergovernmental Organization // or submit itself to any jurisdiction. -#include -#include "NDLocalRegression/NDLocalRegression.h" +#ifdef __CLING__ +#pragma link off all globals; +#pragma link off all classes; +#pragma link off all functions; -ClassImp(o2::NDRegression::NDLocalRegression); +#pragma link C++ class o2::nd_regression::NDRegression + ; -namespace o2 -{ -namespace nd_local_regression -{ -bool NDLocalRegression::init() -{ - std::cout << "Init done" << endl; - return true; -} -} // namespace nd_local_regression -} // namespace o2 +#endif diff --git a/Common/NDRegression/test/NDLocalRegressionTest.C b/Common/NDRegression/test/NDLocalRegressionTest.C deleted file mode 100644 index 6b366df036ec7..0000000000000 --- a/Common/NDRegression/test/NDLocalRegressionTest.C +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019-2020 CERN and copyright holders of ALICE O2. -// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. -// All rights not expressly granted are reserved. -// -// This software is distributed under the terms of the GNU General Public -// License v3 (GPL Version 3), copied verbatim in the file "COPYING". -// -// In applying this license CERN does not waive the privileges and immunities -// granted to it by virtue of its status as an Intergovernmental Organization -// or submit itself to any jurisdiction. - -#define BOOST_TEST_MODULE Test NDLocalRegressionTest -#define BOOST_TEST_MAIN -#define BOOST_TEST_DYN_LINK -#include -#include "NDLocalRegression/NDLocalRegression.h" - -BOOST_AUTO_TEST_CASE(NDLocalRegressionTest_test) -{ - auto fitter = o2::nd_local_regression::NDLocalRegression(); - auto success = fitter.init(); - - BOOST_CHECK(success == true); -} diff --git a/Common/NDRegression/test/NDRegression2DIdeal.cxx b/Common/NDRegression/test/NDRegression2DIdeal.cxx new file mode 100644 index 0000000000000..691c7927acb14 --- /dev/null +++ b/Common/NDRegression/test/NDRegression2DIdeal.cxx @@ -0,0 +1,119 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. 
+ +#define BOOST_TEST_MODULE Test NDRegression2DIdeal +#define BOOST_TEST_MAIN +#define BOOST_TEST_DYN_LINK + +#include +#include "NDRegression/NDRegression.h" +#include "TRandom.h" +#include "TFile.h" +#include "THn.h" + +// namespace o2::utils +// { +// class TreeStreamRedirector; +// } + +// using o2::utils::TreeStreamRedirector; +using std::make_shared; +using std::make_unique; + +BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) +{ + auto pfitNDIdeal = make_unique("pfitNDIdeal", "pfitNDIdeal"); + auto success = pfitNDIdeal->init(); + + BOOST_CHECK(success == true); + + // + // 0.) Initialization of variables and THn + // + Int_t ndim = 2; + Double_t err = 0.1; + + o2::utils::TreeStreamRedirector pcstreamIn("fitNDLocalTestInput.root", "recreate"); + auto pcstreamOutIdeal = make_shared("fitNDLocalTestOutputIdeal.root", "recreate"); + + Double_t* xyz = new Double_t[ndim]; + Double_t* sxyz = new Double_t[ndim]; + Int_t* nbins = new Int_t[ndim]; + Double_t* xmin = new Double_t[ndim]; + Double_t* xmax = new Double_t[ndim]; + TString** chxyz = new TString*[ndim]; + for (Int_t idim = 0; idim < ndim; idim++) { + chxyz[idim] = new TString(TString::Format("xyz%d=", idim).Data()); + nbins[idim] = 40; + xmin[idim] = 0; + xmax[idim] = 1; + } + + auto pformula = make_shared("pformula", "cos(7*x[0]/pi)*sin(11*x[1]/pi)"); + auto hN = make_shared("exampleFit", "exampleFit", ndim, nbins, xmin, xmax); + // + // 1.) generate random input points + // + for (Int_t ipoint = 0; ipoint < 1e4; ipoint++) { + for (Int_t idim = 0; idim < ndim; idim++) { + xyz[idim] = gRandom->Rndm(); + } + Double_t value = pformula->EvalPar(xyz, 0); + Double_t noise = gRandom->Gaus() * err; + Double_t noise2 = noise * (1 + (gRandom->Rndm() < 0.1) * 100); // noise with 10 percent of outliers + Double_t noiseBreit = gRandom->BreitWigner() * err; + pcstreamIn << "testInput" + << "val=" << value << "err=" << err << "noise=" << noise << // gausian noise + "noise2=" << noise2 << // gausian noise + 10% of outliers + "noiseBreit=" << noiseBreit; + for (Int_t idim = 0; idim < ndim; idim++) { + pcstreamIn << "testInput" << chxyz[idim]->Data() << xyz[idim]; + } + pcstreamIn << "testInput" + << "\n"; + } + pcstreamIn.Close(); + std::cout << "testfile done"; + + TFile inpf("fitNDLocalTestInput.root"); + BOOST_CHECK(!inpf.IsZombie()); + auto treeIn = (TTree*)(inpf.GetFile()->Get("testInput")); + BOOST_CHECK(treeIn); + + pfitNDIdeal->SetStreamer(pcstreamOutIdeal); + + success = pfitNDIdeal->SetHistogram((THn*)((hN->Clone()))); + BOOST_CHECK(success); + + success = pfitNDIdeal->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001); + BOOST_CHECK(success); + + treeIn->Draw("(AliNDLocalRegression::GetCorrND(5,xyz0,xyz1)-AliNDLocalRegression::GetCorrND(4,xyz0,xyz1))/sqrt(AliNDLocalRegression::GetCorrNDError(4,xyz0,xyz1)**2+AliNDLocalRegression::GetCorrNDError(5,xyz0,xyz1)**2)>>pullsBreiWigner54(401,-20.5,20.5)", "", ""); + TH1F* pullsBreiWigner54 = (TH1F*)gPad->GetPrimitive("pullsBreiWigner54"); + Double_t meanPullsBreitWigner = treeIn->GetHistogram()->GetMean(); + Double_t meanPullsBreitWignerErr = treeIn->GetHistogram()->GetMeanError(); + Double_t rmsPullsBreitWigner = treeIn->GetHistogram()->GetRMS(); + Double_t rmsPullsBreitWignerErr = treeIn->GetHistogram()->GetRMSError(); + if (TMath::Abs(meanPullsBreitWigner) < 10 * meanPullsBreitWignerErr) { + ::Info("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", "mean pull OK %3.3f\t+-%3.3f", meanPullsBreitWigner, meanPullsBreitWignerErr); + } else { + 
::Error("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", "mean pull NOT OK %3.3f\t+-%3.3f", meanPullsBreitWigner, meanPullsBreitWignerErr); + } + if (rmsPullsBreitWigner < 1 + rmsPullsBreitWignerErr) { + ::Info("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", " rms pull OK %3.3f\t+-%3.3f", rmsPullsBreitWigner, rmsPullsBreitWignerErr); + } else { + ::Error("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", " rms pull NOT OK %3.3f\t+-%3.3f", rmsPullsBreitWigner, rmsPullsBreitWignerErr); + } + PlotData(pullsBreiWigner54, "BreitWigner pulls", "counts (arb. units)", kBlue + 2, "zTitle", rmsPullsBreitWigner, rmsPullsBreitWignerErr, meanPullsBreitWigner, meanPullsBreitWignerErr); + canvasUnitBreitWigner->SaveAs("AliNDLocalRegressionTest.canvasUnitBreitWigner.png"); + + inpf.Close(); +} From 5ef275ed65883e62745ed8543216cd32273e5855 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20B=C3=ADr=C3=B3?= Date: Tue, 12 Apr 2022 11:31:42 +0200 Subject: [PATCH 5/8] Save state --- Common/NDRegression/src/NDRegression.cxx | 63 ++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/Common/NDRegression/src/NDRegression.cxx b/Common/NDRegression/src/NDRegression.cxx index deb772c627ec1..74d71bc20e807 100644 --- a/Common/NDRegression/src/NDRegression.cxx +++ b/Common/NDRegression/src/NDRegression.cxx @@ -465,3 +465,66 @@ Bool_t NDRegression::MakeFit(TTree* tree, const char* formulaVal, const char* fo return kTRUE; } + +void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle", Color_t color = kBlack, TString zTitle = "zTitle", Double_t rms = 999999., Double_t eRms = 0., Double_t mean = 999999., Double_t eMean = 0.) +{ + // + // + gStyle->SetPadRightMargin(0.05); + gStyle->SetPadTopMargin(0.05); + gStyle->SetPadLeftMargin(0.14); + gStyle->SetPadBottomMargin(0.12); + gStyle->SetPadTickX(1); + gStyle->SetPadTickY(1); + gStyle->SetPadGridX(1); + gStyle->SetPadGridY(1); + gStyle->SetOptStat(0); + // + if (color == (kRed + 2)) { + hData->SetMarkerStyle(20); + } + if (color == (kBlue + 2)) { + hData->SetMarkerStyle(21); + } + if (color == (kGreen + 2)) { + hData->SetMarkerStyle(22); + hData->SetMarkerSize(1.3); + } + + hData->SetMarkerColor(color); + hData->SetLineColor(color); + hData->GetXaxis()->SetTitle(xTitle.Data()); + hData->GetYaxis()->SetTitle(yTitle.Data()); + hData->GetZaxis()->SetTitle(zTitle.Data()); + hData->GetXaxis()->SetTitleOffset(1.2); + hData->GetXaxis()->SetTitleSize(0.05); + hData->GetYaxis()->SetTitleOffset(1.3); + hData->GetYaxis()->SetTitleSize(0.05); + hData->GetXaxis()->SetLabelSize(0.035); + hData->GetYaxis()->SetLabelSize(0.035); + hData->GetXaxis()->SetDecimals(); + hData->GetYaxis()->SetDecimals(); + hData->GetZaxis()->SetDecimals(); + hData->Sumw2(); + hData->Draw("pe1"); + + if (mean != 999999.) { + TPaveText* text1 = new TPaveText(0.21, 0.82, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("Mean: %0.2f #pm %0.2f", mean, eMean)); + text1->AddText(Form("RMS: %0.2f #pm %0.2f", rms, eRms)); + text1->Draw(); + } + if (rms != 999999. && mean == 999999.) 
{ + TPaveText* text1 = new TPaveText(0.21, 0.87, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("RMS: %0.2f", rms)); + text1->Draw(); + } +} From 55ba5055af6252d0863c7ad3588f1a785a612f65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20B=C3=ADr=C3=B3?= Date: Fri, 29 Apr 2022 11:13:58 +0200 Subject: [PATCH 6/8] Plot tests --- Common/NDRegression/CMakeLists.txt | 6 + .../include/NDRegression/NDRegression.h | 35 +++ Common/NDRegression/src/NDRegression.cxx | 245 +++++++++++++----- .../NDRegression/test/NDRegression2DGaus.cxx | 216 +++++++++++++++ .../NDRegression/test/NDRegression2DIdeal.cxx | 158 +++++++++-- 5 files changed, 576 insertions(+), 84 deletions(-) create mode 100644 Common/NDRegression/test/NDRegression2DGaus.cxx diff --git a/Common/NDRegression/CMakeLists.txt b/Common/NDRegression/CMakeLists.txt index b7c407772bc36..4d7beee13aa97 100644 --- a/Common/NDRegression/CMakeLists.txt +++ b/Common/NDRegression/CMakeLists.txt @@ -37,3 +37,9 @@ o2_add_test( COMPONENT_NAME NDRegression PUBLIC_LINK_LIBRARIES O2::NDRegression LABELS init tfile ttree histo makefit) +o2_add_test( + NDRegression2DGaus + SOURCES test/NDRegression2DGaus.cxx + COMPONENT_NAME NDRegression + PUBLIC_LINK_LIBRARIES O2::NDRegression + LABELS init tfile ttree histo makefit) diff --git a/Common/NDRegression/include/NDRegression/NDRegression.h b/Common/NDRegression/include/NDRegression/NDRegression.h index 7eb718e5b7bf3..c3038cd14a7ec 100644 --- a/Common/NDRegression/include/NDRegression/NDRegression.h +++ b/Common/NDRegression/include/NDRegression/NDRegression.h @@ -67,6 +67,33 @@ class NDRegression : public TNamed Bool_t MakeRobustStatistic(TVectorD& values, TVectorD& errors, TObjArray& pointArray, TObjArray& kernelArrayI2, Int_t& nvarFormula, Double_t weightCut, Double_t robustFraction); // Bool_t MakeFit(TTree * tree , const char *formulaVal, const char * formulaVar, const char*selection, const char * formulaKernel, const char * dimensionFormula, Double_t weightCut=0.00001, Int_t entries=1000000000, Bool_t useBinNorm=kTRUE); + + static Double_t GetCorrND(Double_t index, Double_t par0); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); + static Double_t GetCorrNDError(Double_t index, Double_t par0); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); + Double_t Eval(Double_t *point); + Double_t EvalError(Double_t *point); + + // function to access the Local Regression from the TFormula + static void AddVisualCorrection(NDRegression *corr, + Int_t position = 0); + static Int_t GetVisualCorrectionIndex(const char *corName); + Int_t GetVisualCorrectionIndex() { + return GetVisualCorrectionIndex(GetName()); + } + static NDRegression *GetVisualCorrection(Int_t position); + static NDRegression *GetVisualCorrection(const char *corName) { + return (fgVisualCorrection == NULL) + ? 0 + : (NDRegression *)fgVisualCorrection->FindObject( + corName); + } + static TObjArray *GetVisualCorrections() { return fgVisualCorrection; } protected: shared_ptr fStreamer; // ! 
streamer to keep - test intermediate data @@ -78,6 +105,7 @@ class NDRegression : public TNamed TMatrixD* fLocalRobustStat; // local robust statistic + Int_t fNParameters; // number of local parameters to fit Int_t* fBinIndex; //[fNParameters] working arrays current bin index Double_t* fBinCenter; //[fNParameters] working current local variables - bin center Double_t* fBinDelta; //[fNParameters] working current local variables - bin delta @@ -88,12 +116,19 @@ class NDRegression : public TNamed Double_t fRobustFractionLTS; // fraction of data used for the robust mean and robust rms estimator (LTS https://en.wikipedia.org/wiki/Least_trimmed_squares) Double_t fRobustRMSLTSCut; // cut on the robust RMS |value-localmean|GetName()); + } + + if (!fgVisualCorrection) + fgVisualCorrection = new TObjArray(1000000); + if (position >= fgVisualCorrection->GetEntriesFast()) + fgVisualCorrection->Expand((position + 10) * 2); + if (fgVisualCorrection->At(position) != NULL) { + ::Error("NDRegression::AddVisualCorrection", + "Correction %d already defined Old: %s New: %s", position, + fgVisualCorrection->At(position)->GetName(), corr->GetName()); + } + fgVisualCorrection->AddAt(corr, position); +} + +NDRegression * +NDRegression::GetVisualCorrection(Int_t position) { + /// Get visula correction registered at index=position + return fgVisualCorrection + ? (NDRegression *)fgVisualCorrection->At(position) + : 0; +} + Double_t NDRegression::GetCorrND(Double_t index, Double_t par0) { // @@ -492,6 +537,7 @@ Double_t NDRegression::GetCorrND(Double_t index, Double_t par0, Double_t par1) { // + // NDRegression* corr = (NDRegression*)fgVisualCorrection->At(index); @@ -513,65 +559,142 @@ Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0, return corr->EvalError(par); } -void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle", Color_t color = kBlack, TString zTitle = "zTitle", Double_t rms = 999999., Double_t eRms = 0., Double_t mean = 999999., Double_t eMean = 0.) 
-{ + +Double_t NDRegression::GetCorrND(Double_t index, Double_t par0, + Double_t par1, Double_t par2) { + // + // + NDRegression *corr = + (NDRegression *)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[3] = {par0, par1, par2}; + return corr->Eval(par); +} + +Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0, + Double_t par1, Double_t par2) { + // + // + NDRegression *corr = + (NDRegression *)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[3] = {par0, par1, par2}; + return corr->EvalError(par); +} + +Double_t NDRegression::GetCorrND(Double_t index, Double_t par0, + Double_t par1, Double_t par2, + Double_t par3) { + // + // + NDRegression *corr = + (NDRegression *)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[4] = {par0, par1, par2, par3}; + return corr->Eval(par); +} + +Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0, + Double_t par1, Double_t par2, + Double_t par3) { // // - gStyle->SetPadRightMargin(0.05); - gStyle->SetPadTopMargin(0.05); - gStyle->SetPadLeftMargin(0.14); - gStyle->SetPadBottomMargin(0.12); - gStyle->SetPadTickX(1); - gStyle->SetPadTickY(1); - gStyle->SetPadGridX(1); - gStyle->SetPadGridY(1); - gStyle->SetOptStat(0); - // - if (color == (kRed + 2)) { - hData->SetMarkerStyle(20); - } - if (color == (kBlue + 2)) { - hData->SetMarkerStyle(21); - } - if (color == (kGreen + 2)) { - hData->SetMarkerStyle(22); - hData->SetMarkerSize(1.3); - } - - hData->SetMarkerColor(color); - hData->SetLineColor(color); - hData->GetXaxis()->SetTitle(xTitle.Data()); - hData->GetYaxis()->SetTitle(yTitle.Data()); - hData->GetZaxis()->SetTitle(zTitle.Data()); - hData->GetXaxis()->SetTitleOffset(1.2); - hData->GetXaxis()->SetTitleSize(0.05); - hData->GetYaxis()->SetTitleOffset(1.3); - hData->GetYaxis()->SetTitleSize(0.05); - hData->GetXaxis()->SetLabelSize(0.035); - hData->GetYaxis()->SetLabelSize(0.035); - hData->GetXaxis()->SetDecimals(); - hData->GetYaxis()->SetDecimals(); - hData->GetZaxis()->SetDecimals(); - hData->Sumw2(); - hData->Draw("pe1"); - - if (mean != 999999.) { - TPaveText* text1 = new TPaveText(0.21, 0.82, 0.51, 0.92, "NDC"); - text1->SetTextFont(43); - text1->SetTextSize(30.); - text1->SetBorderSize(1); - text1->SetFillColor(kWhite); - text1->AddText(Form("Mean: %0.2f #pm %0.2f", mean, eMean)); - text1->AddText(Form("RMS: %0.2f #pm %0.2f", rms, eRms)); - text1->Draw(); - } - if (rms != 999999. && mean == 999999.) 
{ - TPaveText* text1 = new TPaveText(0.21, 0.87, 0.51, 0.92, "NDC"); - text1->SetTextFont(43); - text1->SetTextSize(30.); - text1->SetBorderSize(1); - text1->SetFillColor(kWhite); - text1->AddText(Form("RMS: %0.2f", rms)); - text1->Draw(); + NDRegression *corr = + (NDRegression *)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[4] = {par0, par1, par2, par3}; + return corr->EvalError(par); +} + + +Double_t NDRegression::Eval(Double_t *point) { + // + // + // + const Double_t almost0 = 0.00000001; + // backward compatibility + if (!fBinWidth) { + fBinWidth = new Double_t[fHistPoints->GetNdimensions()]; + } + + for (Int_t iDim = 0; iDim < fNParameters; iDim++) { + if (point[iDim] <= fHistPoints->GetAxis(iDim)->GetXmin()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmin() + + almost0 * fHistPoints->GetAxis(iDim)->GetBinWidth(0); + if (point[iDim] >= fHistPoints->GetAxis(iDim)->GetXmax()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmax() - + almost0 * fHistPoints->GetAxis(iDim)->GetBinWidth(0); + } + + Int_t ibin = fHistPoints->GetBin(point); + Bool_t rangeOK = kTRUE; + if (ibin >= fLocalFitParam->GetEntriesFast()) { + rangeOK = kFALSE; + } else { + if (fLocalFitParam->UncheckedAt(ibin) == NULL) { + rangeOK = kFALSE; + } } + if (!rangeOK) + return 0; + + fHistPoints->GetBinContent(ibin, fBinIndex); + for (Int_t idim = 0; idim < fNParameters; idim++) { + fBinCenter[idim] = + fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]); + } + TVectorD &vecParam = *((TVectorD *)fLocalFitParam->At(ibin)); + Double_t value = vecParam[0]; + if (!rangeOK) + return value; + if (fUseBinNorm) { + for (Int_t ipar = 0; ipar < fNParameters; ipar++) { + Double_t delta = (point[ipar] - fBinCenter[ipar]) / fBinWidth[ipar]; + value += + (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta; + } + } else { + for (Int_t ipar = 0; ipar < fNParameters; ipar++) { + Double_t delta = (point[ipar] - fBinCenter[ipar]); + value += + (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta; + } + } + return value; +} + +Double_t NDRegression::EvalError(Double_t *point) { + // + // + // + if (fLocalFitCovar == NULL) { + ::Error("NDRegression::EvalError", + "Covariance matrix not available"); + return 0; + } + for (Int_t iDim = 0; iDim < fNParameters; iDim++) { + if (point[iDim] < fHistPoints->GetAxis(iDim)->GetXmin()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmin(); + if (point[iDim] > fHistPoints->GetAxis(iDim)->GetXmax()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmax(); + } + + Int_t ibin = fHistPoints->GetBin(point); + if (fLocalFitParam->At(ibin) == NULL) + return 0; + fHistPoints->GetBinContent(ibin, fBinIndex); + for (Int_t idim = 0; idim < fNParameters; idim++) { + fBinCenter[idim] = + fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + } + TMatrixD &vecCovar = *((TMatrixD *)fLocalFitCovar->At(ibin)); + // TVectorD &vecQuality = *((TVectorD*)fLocalFitQuality->At(ibin)); + Double_t value = TMath::Sqrt(vecCovar(0, 0)); // fill covariance to be used + return value; } + diff --git a/Common/NDRegression/test/NDRegression2DGaus.cxx b/Common/NDRegression/test/NDRegression2DGaus.cxx new file mode 100644 index 0000000000000..c766ef2bb61e3 --- /dev/null +++ b/Common/NDRegression/test/NDRegression2DGaus.cxx @@ -0,0 +1,216 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. 
+// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. + +#define BOOST_TEST_MODULE Test NDRegression2DGaus +#define BOOST_TEST_MAIN +#define BOOST_TEST_DYN_LINK + +#include +#include "NDRegression/NDRegression.h" +#include "TRandom.h" +#include "TFile.h" +#include "THn.h" + +// namespace o2::utils +// { +// class TreeStreamRedirector; +// } + +// using o2::utils::TreeStreamRedirector; +using o2::nd_regression::NDRegression; +using std::make_shared; +using std::make_unique; + +void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle", Color_t color = kBlack, TString zTitle = "zTitle", Double_t rms = 999999., Double_t eRms = 0., Double_t mean = 999999., Double_t eMean = 0.) +{ + // + // + // gStyle->SetPadRightMargin(0.05); + // gStyle->SetPadTopMargin(0.05); + // gStyle->SetPadLeftMargin(0.24); + // gStyle->SetPadBottomMargin(0.22); + // gStyle->SetPadTickX(1); + // gStyle->SetPadTickY(1); + // gStyle->SetPadGridX(1); + // gStyle->SetPadGridY(1); + // gStyle->SetOptStat(0); + gPad->SetRightMargin(0.05); + gPad->SetTopMargin(0.05); + gPad->SetLeftMargin(0.14); + gPad->SetBottomMargin(0.12); + gPad->SetTicks(1); + gPad->SetGrid(1); + gPad->SetObjectStat(0); + // + if (color == (kRed + 2)) { + hData->SetMarkerStyle(20); + } + if (color == (kBlue + 2)) { + hData->SetMarkerStyle(21); + } + if (color == (kGreen + 2)) { + hData->SetMarkerStyle(22); + } + hData->SetMarkerSize(0.7); + + hData->SetMarkerColor(color); + hData->SetLineColor(color); + hData->GetXaxis()->SetTitle(xTitle.Data()); + hData->GetYaxis()->SetTitle(yTitle.Data()); + hData->GetZaxis()->SetTitle(zTitle.Data()); + hData->GetXaxis()->SetTitleOffset(1.2); + hData->GetXaxis()->SetTitleSize(0.05); + hData->GetYaxis()->SetTitleOffset(1.3); + hData->GetYaxis()->SetTitleSize(0.05); + hData->GetXaxis()->SetLabelSize(0.035); + hData->GetYaxis()->SetLabelSize(0.035); + hData->GetXaxis()->SetDecimals(); + hData->GetYaxis()->SetDecimals(); + hData->GetZaxis()->SetDecimals(); + hData->Sumw2(); + hData->Draw("pe1 same"); + // hData->Draw("l same"); + + if (mean != 999999.) { + TPaveText* text1 = new TPaveText(0.21, 0.82, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("Mean: %0.2f #pm %0.2f", mean, eMean)); + text1->AddText(Form("RMS: %0.2f #pm %0.2f", rms, eRms)); + text1->Draw(); + } + if (rms != 999999. && mean == 999999.) { + TPaveText* text1 = new TPaveText(0.21, 0.87, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("RMS: %0.2f", rms)); + text1->Draw(); + } +} + +BOOST_AUTO_TEST_CASE(NDRegression2DGaus_test) +{ + auto pfitNDGaus0 = make_unique("pfitNDGaus0", "pfitNDGaus0"); + + auto success = pfitNDGaus0->init(); + BOOST_CHECK(success == true); + + // + // 0.) 
Initialization of variables and THn + // + Int_t ndim = 2; + Double_t err = 0.1; + + o2::utils::TreeStreamRedirector pcstreamIn("fitNDLocalTestInput.root", "recreate"); + auto pcstreamOutGaus0 = make_shared("fitNDLocalTestOutputGaus0.root", "recreate"); + + Double_t* xyz = new Double_t[ndim]; + Double_t* sxyz = new Double_t[ndim]; + Int_t* nbins = new Int_t[ndim]; + Double_t* xmin = new Double_t[ndim]; + Double_t* xmax = new Double_t[ndim]; + TString** chxyz = new TString*[ndim]; + for (Int_t idim = 0; idim < ndim; idim++) { + chxyz[idim] = new TString(TString::Format("xyz%d=", idim).Data()); + nbins[idim] = 70; + xmin[idim] = -1; + xmax[idim] = 2; + } + + auto pformula = make_shared("pformula", "cos(7*x[0])*cos(3*x[1])+pow(x[0], x[1])"); + auto hN = make_shared("exampleFit", "exampleFit", ndim, nbins, xmin, xmax); + // + // 1.) generate random input points + // + for (Int_t ipoint = 0; ipoint < 1e4; ipoint++) { + for (Int_t idim = 0; idim < ndim; idim++) { + xyz[idim] = gRandom->Rndm(); + } + Double_t value = pformula->EvalPar(xyz, 0); + Double_t noise = gRandom->Gaus() * err; + Double_t noise2 = noise * (1 + (gRandom->Rndm() < 0.1) * 100); // noise with 10 percent of outliers + Double_t noiseBreit = gRandom->BreitWigner() * err; + pcstreamIn << "testInput" + << "val=" << value << "err=" << err << "noise=" << noise << // gausian noise + "noise2=" << noise2 << // gausian noise + 10% of outliers + "noiseBreit=" << noiseBreit; + for (Int_t idim = 0; idim < ndim; idim++) { + pcstreamIn << "testInput" << chxyz[idim]->Data() << xyz[idim]; + } + pcstreamIn << "testInput" + << "\n"; + } + pcstreamIn.Close(); + std::cout << "testfile done\n"; + + TFile inpf("fitNDLocalTestInput.root"); + BOOST_CHECK(!inpf.IsZombie()); + auto treeIn = (TTree*)(inpf.GetFile()->Get("testInput")); + BOOST_CHECK(treeIn); + + pfitNDGaus0->SetStreamer(pcstreamOutGaus0); + + + success = pfitNDGaus0->SetHistogram((THn*)((hN->Clone()))); + BOOST_CHECK(success); + + success = pfitNDGaus0->MakeFit(treeIn, "val+noise:err", "xyz0:xyz1","Entry$%2==1", "0.02:0.02","2:2",0.0001); // sample Gaussian1 + BOOST_CHECK(success); + + std::cout << "Now drawing...\n"; + + pfitNDGaus0->AddVisualCorrection(pfitNDGaus0.get(),1); + + TObjArray * array = NDRegression::GetVisualCorrections(); + for (Int_t i=0; iGetEntries(); i++){ + NDRegression * regression = ( NDRegression *)array->At(i); + if (regression==NULL) continue; + regression->AddVisualCorrection(regression); + Int_t hashIndex = regression->GetVisualCorrectionIndex(); + treeIn->SetAlias( regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)",hashIndex).Data()); + } + pcstreamOutGaus0.reset(); + std::cout << "Entries: " << array->GetEntries() << std::endl; + + + TCanvas * canvasGaus = new TCanvas("canvasGaus","canvasGaus",800,800); + treeIn->Draw("val>>inputData(71,-1.1,2.1)","",""); + TH1F *inputData = (TH1F*)gPad->GetPrimitive("inputData"); + treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>gaus(71,-1.1,2.1)","",""); + // treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(3,xyz0,xyz1)-o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))/sqrt(o2::nd_regression::NDRegression::GetCorrNDError(3,xyz0,xyz1)**2+o2::nd_regression::NDRegression::GetCorrNDError(2,xyz0,xyz1)**2)>>ideal(401,-20.5,20.5)","",""); + TH1F *gaus = (TH1F*)gPad->GetPrimitive("gaus"); + Double_t meanGaus = treeIn->GetHistogram()->GetMean(); + Double_t meanGausErr = treeIn->GetHistogram()->GetMeanError(); + Double_t rmsGaus = 
treeIn->GetHistogram()->GetRMS(); + Double_t rmsGausErr = treeIn->GetHistogram()->GetRMSError(); + if (TMath::Abs(meanGaus) <10*meanGausErr) { + ::Info( "NDRegression2DGausTest","mean pull OK %3.3f\t+-%3.3f", meanGaus, meanGausErr); + }else{ + ::Error( "NDRegressionTest","mean pull NOT OK %3.3f\t+-%3.3f", meanGaus, meanGausErr); + } + if (rmsGaus<1+rmsGausErr) { + ::Info( "NDRegressionTest"," rms pull OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr); + }else{ + ::Error( "NDRegressionTest"," rms pull NOT OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr); + } + + + + inputData->Draw("same"); + PlotData(gaus,"Gaus","counts (arb. units)",kRed+2,"zTitle",rmsGaus,rmsGausErr,meanGaus,meanGausErr); + canvasGaus->SaveAs("NDRegressionTest.canvasGausTest.png"); + + inpf.Close(); +} diff --git a/Common/NDRegression/test/NDRegression2DIdeal.cxx b/Common/NDRegression/test/NDRegression2DIdeal.cxx index a19535ba83795..54eaf0aec2731 100644 --- a/Common/NDRegression/test/NDRegression2DIdeal.cxx +++ b/Common/NDRegression/test/NDRegression2DIdeal.cxx @@ -25,14 +25,90 @@ // } // using o2::utils::TreeStreamRedirector; +using o2::nd_regression::NDRegression; using std::make_shared; using std::make_unique; +void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle", Color_t color = kBlack, TString zTitle = "zTitle", Double_t rms = 999999., Double_t eRms = 0., Double_t mean = 999999., Double_t eMean = 0.) +{ + // + // + // gStyle->SetPadRightMargin(0.05); + // gStyle->SetPadTopMargin(0.05); + // gStyle->SetPadLeftMargin(0.24); + // gStyle->SetPadBottomMargin(0.22); + // gStyle->SetPadTickX(1); + // gStyle->SetPadTickY(1); + // gStyle->SetPadGridX(1); + // gStyle->SetPadGridY(1); + // gStyle->SetOptStat(0); + gPad->SetRightMargin(0.05); + gPad->SetTopMargin(0.05); + gPad->SetLeftMargin(0.14); + gPad->SetBottomMargin(0.12); + gPad->SetTicks(1); + gPad->SetGrid(1); + gPad->SetObjectStat(0); + // + if (color == (kRed + 2)) { + hData->SetMarkerStyle(20); + } + if (color == (kBlue + 2)) { + hData->SetMarkerStyle(21); + } + if (color == (kGreen + 2)) { + hData->SetMarkerStyle(22); + } + hData->SetMarkerSize(0.7); + + hData->SetMarkerColor(color); + hData->SetLineColor(color); + hData->GetXaxis()->SetTitle(xTitle.Data()); + hData->GetYaxis()->SetTitle(yTitle.Data()); + hData->GetZaxis()->SetTitle(zTitle.Data()); + hData->GetXaxis()->SetTitleOffset(1.2); + hData->GetXaxis()->SetTitleSize(0.05); + hData->GetYaxis()->SetTitleOffset(1.3); + hData->GetYaxis()->SetTitleSize(0.05); + hData->GetXaxis()->SetLabelSize(0.035); + hData->GetYaxis()->SetLabelSize(0.035); + hData->GetXaxis()->SetDecimals(); + hData->GetYaxis()->SetDecimals(); + hData->GetZaxis()->SetDecimals(); + hData->Sumw2(); + hData->Draw("pe1 same"); + // hData->Draw("l same"); + + if (mean != 999999.) { + TPaveText* text1 = new TPaveText(0.21, 0.82, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("Mean: %0.2f #pm %0.2f", mean, eMean)); + text1->AddText(Form("RMS: %0.2f #pm %0.2f", rms, eRms)); + text1->Draw(); + } + if (rms != 999999. && mean == 999999.) 
{ + TPaveText* text1 = new TPaveText(0.21, 0.87, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("RMS: %0.2f", rms)); + text1->Draw(); + } +} + + BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) { - auto pfitNDIdeal = make_unique("pfitNDIdeal", "pfitNDIdeal"); - auto success = pfitNDIdeal->init(); + auto pfitNDIdeal = make_unique("pfitNDIdeal", "pfitNDIdeal"); + auto pfitNDIdeal2 = make_unique("pfitNDIdeal2", "pfitNDIdeal2"); + auto success = pfitNDIdeal->init(); + BOOST_CHECK(success == true); + success = pfitNDIdeal2->init(); BOOST_CHECK(success == true); // @@ -43,6 +119,7 @@ BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) o2::utils::TreeStreamRedirector pcstreamIn("fitNDLocalTestInput.root", "recreate"); auto pcstreamOutIdeal = make_shared("fitNDLocalTestOutputIdeal.root", "recreate"); + auto pcstreamOutIdeal2 = make_shared("fitNDLocalTestOutputIdeal2.root", "recreate"); Double_t* xyz = new Double_t[ndim]; Double_t* sxyz = new Double_t[ndim]; @@ -52,12 +129,12 @@ BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) TString** chxyz = new TString*[ndim]; for (Int_t idim = 0; idim < ndim; idim++) { chxyz[idim] = new TString(TString::Format("xyz%d=", idim).Data()); - nbins[idim] = 40; - xmin[idim] = 0; - xmax[idim] = 1; + nbins[idim] = 70; + xmin[idim] = -1; + xmax[idim] = 2; } - auto pformula = make_shared("pformula", "cos(7*x[0]/pi)*sin(11*x[1]/pi)"); + auto pformula = make_shared("pformula", "cos(7*x[0])*cos(3*x[1])+pow(x[0], x[1])"); auto hN = make_shared("exampleFit", "exampleFit", ndim, nbins, xmin, xmax); // // 1.) generate random input points @@ -81,7 +158,7 @@ BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) << "\n"; } pcstreamIn.Close(); - std::cout << "testfile done"; + std::cout << "testfile done\n"; TFile inpf("fitNDLocalTestInput.root"); BOOST_CHECK(!inpf.IsZombie()); @@ -89,31 +166,66 @@ BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) BOOST_CHECK(treeIn); pfitNDIdeal->SetStreamer(pcstreamOutIdeal); + pfitNDIdeal2->SetStreamer(pcstreamOutIdeal2); + success = pfitNDIdeal->SetHistogram((THn*)((hN->Clone()))); BOOST_CHECK(success); + success = pfitNDIdeal2->SetHistogram((THn*)((hN->Clone()))); + BOOST_CHECK(success); success = pfitNDIdeal->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001); BOOST_CHECK(success); + success = pfitNDIdeal2->MakeFit(treeIn, "val:err", "xyz0:xyz1","Entry$%2==1", "0.08:0.08","2:2",0.0001); // sample Gaussian1 + BOOST_CHECK(success); + + std::cout << "Now drawing...\n"; - treeIn->Draw("(AliNDLocalRegression::GetCorrND(5,xyz0,xyz1)-AliNDLocalRegression::GetCorrND(4,xyz0,xyz1))/sqrt(AliNDLocalRegression::GetCorrNDError(4,xyz0,xyz1)**2+AliNDLocalRegression::GetCorrNDError(5,xyz0,xyz1)**2)>>pullsBreiWigner54(401,-20.5,20.5)", "", ""); - TH1F* pullsBreiWigner54 = (TH1F*)gPad->GetPrimitive("pullsBreiWigner54"); - Double_t meanPullsBreitWigner = treeIn->GetHistogram()->GetMean(); - Double_t meanPullsBreitWignerErr = treeIn->GetHistogram()->GetMeanError(); - Double_t rmsPullsBreitWigner = treeIn->GetHistogram()->GetRMS(); - Double_t rmsPullsBreitWignerErr = treeIn->GetHistogram()->GetRMSError(); - if (TMath::Abs(meanPullsBreitWigner) < 10 * meanPullsBreitWignerErr) { - ::Info("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", "mean pull OK %3.3f\t+-%3.3f", meanPullsBreitWigner, meanPullsBreitWignerErr); - } else { - ::Error("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", "mean pull NOT OK %3.3f\t+-%3.3f", meanPullsBreitWigner, 
meanPullsBreitWignerErr); + pfitNDIdeal->AddVisualCorrection(pfitNDIdeal.get(),1); + pfitNDIdeal2->AddVisualCorrection(pfitNDIdeal2.get(),2); + + TObjArray * array = NDRegression::GetVisualCorrections(); + for (Int_t i=0; iGetEntries(); i++){ + NDRegression * regression = ( NDRegression *)array->At(i); + if (regression==NULL) continue; + regression->AddVisualCorrection(regression); + Int_t hashIndex = regression->GetVisualCorrectionIndex(); + treeIn->SetAlias( regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)",hashIndex).Data()); + } + pcstreamOutIdeal.reset(); + pcstreamOutIdeal2.reset(); + std::cout << "Entries: " << array->GetEntries() << std::endl; + + + TCanvas * canvasIdeal = new TCanvas("canvasIdeal","canvasIdeal",800,800); + treeIn->Draw("val>>inputData(71,-1.1,2.1)","",""); + TH1F *inputData = (TH1F*)gPad->GetPrimitive("inputData"); + treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>ideal(71,-1.1,2.1)","",""); + // treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(3,xyz0,xyz1)-o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))/sqrt(o2::nd_regression::NDRegression::GetCorrNDError(3,xyz0,xyz1)**2+o2::nd_regression::NDRegression::GetCorrNDError(2,xyz0,xyz1)**2)>>ideal(401,-20.5,20.5)","",""); + TH1F *ideal = (TH1F*)gPad->GetPrimitive("ideal"); + Double_t meanIdeal = treeIn->GetHistogram()->GetMean(); + Double_t meanIdealErr = treeIn->GetHistogram()->GetMeanError(); + Double_t rmsIdeal = treeIn->GetHistogram()->GetRMS(); + Double_t rmsIdealErr = treeIn->GetHistogram()->GetRMSError(); + if (TMath::Abs(meanIdeal) <10*meanIdealErr) { + ::Info( "NDRegression2DIdealTest","mean pull OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr); + }else{ + ::Error( "NDRegressionTest","mean pull NOT OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr); } - if (rmsPullsBreitWigner < 1 + rmsPullsBreitWignerErr) { - ::Info("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", " rms pull OK %3.3f\t+-%3.3f", rmsPullsBreitWigner, rmsPullsBreitWignerErr); - } else { - ::Error("AliNDLocalRegressionTest::UnitTestBreitWignerNoise", " rms pull NOT OK %3.3f\t+-%3.3f", rmsPullsBreitWigner, rmsPullsBreitWignerErr); + if (rmsIdeal<1+rmsIdealErr) { + ::Info( "NDRegressionTest"," rms pull OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr); + }else{ + ::Error( "NDRegressionTest"," rms pull NOT OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr); } - // PlotData(pullsBreiWigner54, "BreitWigner pulls", "counts (arb. units)", kBlue + 2, "zTitle", rmsPullsBreitWigner, rmsPullsBreitWignerErr, meanPullsBreitWigner, meanPullsBreitWignerErr); - // canvasUnitBreitWigner->SaveAs("AliNDLocalRegressionTest.canvasUnitBreitWigner.png"); + treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))>>ideal2(71,-1.1,2.1)","",""); + TH1F *ideal2 = (TH1F*)gPad->GetPrimitive("ideal2"); + + + + inputData->Draw("same"); + PlotData(ideal,"Ideal","counts (arb. units)",kRed+2,"zTitle",rmsIdeal,rmsIdealErr,meanIdeal,meanIdealErr); + PlotData(ideal2,"Ideal","counts (arb. 
units)",kGreen+2,"zTitle"); + canvasIdeal->SaveAs("NDRegressionTest.canvasIdealTest.png"); inpf.Close(); } From 0cc40c32c6e10e7b3e08445f458b2e071f257998 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20B=C3=ADr=C3=B3?= Date: Mon, 18 Jul 2022 13:21:36 +0200 Subject: [PATCH 7/8] Commenting --- Common/NDRegression/include/NDRegression/NDRegression.h | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Common/NDRegression/include/NDRegression/NDRegression.h b/Common/NDRegression/include/NDRegression/NDRegression.h index c3038cd14a7ec..8c15b93e3be65 100644 --- a/Common/NDRegression/include/NDRegression/NDRegression.h +++ b/Common/NDRegression/include/NDRegression/NDRegression.h @@ -12,6 +12,14 @@ /// \file NDRegression.h /// \author Gabor Biro, biro.gabor@wigner.hu +/* + Related task: https://alice.its.cern.ch/jira/browse/ATO-569 + + The following incomplete implementation is largely based on the original + AliNDLocalRegression class, described by the following task: + https://alice.its.cern.ch/jira/browse/ATO-193 +*/ + #ifndef ALICEO2_NDREGRESSION_H #define ALICEO2_NDREGRESSION_H @@ -36,6 +44,7 @@ class TreeStreamRedirector; class THn; + namespace o2 { From 7f33da272f17b214894bb1300cb132c03f782485 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20B=C3=ADr=C3=B3?= Date: Thu, 1 Sep 2022 13:57:23 +0200 Subject: [PATCH 8/8] fix format errors --- .../include/NDRegression/NDRegression.h | 47 ++++++------ Common/NDRegression/src/NDRegression.cxx | 73 ++++++++++--------- .../NDRegression/test/NDRegression2DGaus.cxx | 49 ++++++------- .../NDRegression/test/NDRegression2DIdeal.cxx | 58 +++++++-------- 4 files changed, 111 insertions(+), 116 deletions(-) diff --git a/Common/NDRegression/include/NDRegression/NDRegression.h b/Common/NDRegression/include/NDRegression/NDRegression.h index 8c15b93e3be65..fd5a1e4148ba8 100644 --- a/Common/NDRegression/include/NDRegression/NDRegression.h +++ b/Common/NDRegression/include/NDRegression/NDRegression.h @@ -44,7 +44,6 @@ class TreeStreamRedirector; class THn; - namespace o2 { @@ -76,33 +75,35 @@ class NDRegression : public TNamed Bool_t MakeRobustStatistic(TVectorD& values, TVectorD& errors, TObjArray& pointArray, TObjArray& kernelArrayI2, Int_t& nvarFormula, Double_t weightCut, Double_t robustFraction); // Bool_t MakeFit(TTree * tree , const char *formulaVal, const char * formulaVar, const char*selection, const char * formulaKernel, const char * dimensionFormula, Double_t weightCut=0.00001, Int_t entries=1000000000, Bool_t useBinNorm=kTRUE); - + static Double_t GetCorrND(Double_t index, Double_t par0); static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1); - static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2); - static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); static Double_t GetCorrNDError(Double_t index, Double_t par0); static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1); - static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2); - static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); - Double_t Eval(Double_t *point); - Double_t EvalError(Double_t *point); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, 
Double_t par2); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); + Double_t Eval(Double_t* point); + Double_t EvalError(Double_t* point); // function to access the Local Regression from the TFormula - static void AddVisualCorrection(NDRegression *corr, + static void AddVisualCorrection(NDRegression* corr, Int_t position = 0); - static Int_t GetVisualCorrectionIndex(const char *corName); - Int_t GetVisualCorrectionIndex() { + static Int_t GetVisualCorrectionIndex(const char* corName); + Int_t GetVisualCorrectionIndex() + { return GetVisualCorrectionIndex(GetName()); } - static NDRegression *GetVisualCorrection(Int_t position); - static NDRegression *GetVisualCorrection(const char *corName) { + static NDRegression* GetVisualCorrection(Int_t position); + static NDRegression* GetVisualCorrection(const char* corName) + { return (fgVisualCorrection == NULL) - ? 0 - : (NDRegression *)fgVisualCorrection->FindObject( - corName); + ? 0 + : (NDRegression*)fgVisualCorrection->FindObject( + corName); } - static TObjArray *GetVisualCorrections() { return fgVisualCorrection; } + static TObjArray* GetVisualCorrections() { return fgVisualCorrection; } protected: shared_ptr fStreamer; // ! streamer to keep - test intermediate data @@ -114,7 +115,7 @@ class NDRegression : public TNamed TMatrixD* fLocalRobustStat; // local robust statistic - Int_t fNParameters; // number of local parameters to fit + Int_t fNParameters; // number of local parameters to fit Int_t* fBinIndex; //[fNParameters] working arrays current bin index Double_t* fBinCenter; //[fNParameters] working current local variables - bin center Double_t* fBinDelta; //[fNParameters] working current local variables - bin delta @@ -125,19 +126,15 @@ class NDRegression : public TNamed Double_t fRobustFractionLTS; // fraction of data used for the robust mean and robust rms estimator (LTS https://en.wikipedia.org/wiki/Least_trimmed_squares) Double_t fRobustRMSLTSCut; // cut on the robust RMS |value-localmean|AddAt(corr, position); } -NDRegression * -NDRegression::GetVisualCorrection(Int_t position) { +NDRegression* + NDRegression::GetVisualCorrection(Int_t position) +{ /// Get visula correction registered at index=position return fgVisualCorrection - ? (NDRegression *)fgVisualCorrection->At(position) - : 0; + ? 
(NDRegression*)fgVisualCorrection->At(position)
+           : 0;
 }
 
 Double_t NDRegression::GetCorrND(Double_t index, Double_t par0)
@@ -559,13 +561,13 @@ Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0,
   return corr->EvalError(par);
 }
 
-
 Double_t NDRegression::GetCorrND(Double_t index, Double_t par0,
-                                 Double_t par1, Double_t par2) {
+                                 Double_t par1, Double_t par2)
+{
   //
   //
-  NDRegression *corr =
-      (NDRegression *)fgVisualCorrection->At(index);
+  NDRegression* corr =
+    (NDRegression*)fgVisualCorrection->At(index);
   if (!corr)
     return 0;
   Double_t par[3] = {par0, par1, par2};
@@ -573,11 +575,12 @@ Double_t NDRegression::GetCorrND(Double_t index, Double_t par0,
 }
 
 Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0,
-                                      Double_t par1, Double_t par2) {
+                                      Double_t par1, Double_t par2)
+{
   //
   //
-  NDRegression *corr =
-      (NDRegression *)fgVisualCorrection->At(index);
+  NDRegression* corr =
+    (NDRegression*)fgVisualCorrection->At(index);
   if (!corr)
     return 0;
   Double_t par[3] = {par0, par1, par2};
@@ -585,12 +588,13 @@ Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0,
 }
 
 Double_t NDRegression::GetCorrND(Double_t index, Double_t par0,
-                                 Double_t par1, Double_t par2,
-                                 Double_t par3) {
+                                 Double_t par1, Double_t par2,
+                                 Double_t par3)
+{
   //
   //
-  NDRegression *corr =
-      (NDRegression *)fgVisualCorrection->At(index);
+  NDRegression* corr =
+    (NDRegression*)fgVisualCorrection->At(index);
   if (!corr)
     return 0;
   Double_t par[4] = {par0, par1, par2, par3};
@@ -598,20 +602,21 @@ Double_t NDRegression::GetCorrND(Double_t index, Double_t par0,
 }
 
 Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0,
-                                      Double_t par1, Double_t par2,
-                                      Double_t par3) {
+                                      Double_t par1, Double_t par2,
+                                      Double_t par3)
+{
   //
   //
-  NDRegression *corr =
-      (NDRegression *)fgVisualCorrection->At(index);
+  NDRegression* corr =
+    (NDRegression*)fgVisualCorrection->At(index);
   if (!corr)
     return 0;
   Double_t par[4] = {par0, par1, par2, par3};
   return corr->EvalError(par);
 }
 
-
-Double_t NDRegression::Eval(Double_t *point) {
+Double_t NDRegression::Eval(Double_t* point)
+{
   //
   //
   //
@@ -645,10 +650,10 @@ Double_t NDRegression::Eval(Double_t *point) {
   fHistPoints->GetBinContent(ibin, fBinIndex);
   for (Int_t idim = 0; idim < fNParameters; idim++) {
     fBinCenter[idim] =
-        fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]);
+      fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]);
     fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]);
   }
-  TVectorD &vecParam = *((TVectorD *)fLocalFitParam->At(ibin));
+  TVectorD& vecParam = *((TVectorD*)fLocalFitParam->At(ibin));
   Double_t value = vecParam[0];
   if (!rangeOK)
     return value;
@@ -656,19 +661,20 @@ Double_t NDRegression::Eval(Double_t *point) {
     for (Int_t ipar = 0; ipar < fNParameters; ipar++) {
       Double_t delta = (point[ipar] - fBinCenter[ipar]) / fBinWidth[ipar];
       value +=
-          (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta;
+        (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta;
     }
   } else {
     for (Int_t ipar = 0; ipar < fNParameters; ipar++) {
       Double_t delta = (point[ipar] - fBinCenter[ipar]);
       value +=
-          (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta;
+        (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta;
     }
   }
   return value;
 }
 
-Double_t NDRegression::EvalError(Double_t *point) {
+Double_t NDRegression::EvalError(Double_t* point)
+{
   //
   //
   //
@@ -690,11 +696,10 @@ Double_t NDRegression::EvalError(Double_t *point) {
   fHistPoints->GetBinContent(ibin, fBinIndex);
   for (Int_t idim = 0; idim < fNParameters; idim++) {
     fBinCenter[idim] =
-        fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]);
+      fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]);
   }
-  TMatrixD &vecCovar = *((TMatrixD *)fLocalFitCovar->At(ibin));
+  TMatrixD& vecCovar = *((TMatrixD*)fLocalFitCovar->At(ibin));
   // TVectorD &vecQuality = *((TVectorD*)fLocalFitQuality->At(ibin));
   Double_t value = TMath::Sqrt(vecCovar(0, 0)); // fill covariance to be used
   return value;
 }
-
diff --git a/Common/NDRegression/test/NDRegression2DGaus.cxx b/Common/NDRegression/test/NDRegression2DGaus.cxx
index c766ef2bb61e3..539b580feb1dc 100644
--- a/Common/NDRegression/test/NDRegression2DGaus.cxx
+++ b/Common/NDRegression/test/NDRegression2DGaus.cxx
@@ -162,54 +162,51 @@ BOOST_AUTO_TEST_CASE(NDRegression2DGaus_test)
   pfitNDGaus0->SetStreamer(pcstreamOutGaus0);
-
   success = pfitNDGaus0->SetHistogram((THn*)((hN->Clone())));
   BOOST_CHECK(success);
-  success = pfitNDGaus0->MakeFit(treeIn, "val+noise:err", "xyz0:xyz1","Entry$%2==1", "0.02:0.02","2:2",0.0001); // sample Gaussian1
+  success = pfitNDGaus0->MakeFit(treeIn, "val+noise:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001); // sample Gaussian1
   BOOST_CHECK(success);
 
   std::cout << "Now drawing...\n";
-  pfitNDGaus0->AddVisualCorrection(pfitNDGaus0.get(),1);
+  pfitNDGaus0->AddVisualCorrection(pfitNDGaus0.get(), 1);
 
-  TObjArray * array = NDRegression::GetVisualCorrections();
-  for (Int_t i=0; i<array->GetEntries(); i++){
-    NDRegression * regression = ( NDRegression *)array->At(i);
-    if (regression==NULL) continue;
+  TObjArray* array = NDRegression::GetVisualCorrections();
+  for (Int_t i = 0; i < array->GetEntries(); i++) {
+    NDRegression* regression = (NDRegression*)array->At(i);
+    if (regression == NULL)
+      continue;
     regression->AddVisualCorrection(regression);
     Int_t hashIndex = regression->GetVisualCorrectionIndex();
-    treeIn->SetAlias( regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)",hashIndex).Data());
+    treeIn->SetAlias(regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)", hashIndex).Data());
   }
 
   pcstreamOutGaus0.reset();
 
   std::cout << "Entries: " << array->GetEntries() << std::endl;
-
-  TCanvas * canvasGaus = new TCanvas("canvasGaus","canvasGaus",800,800);
-  treeIn->Draw("val>>inputData(71,-1.1,2.1)","","");
-  TH1F *inputData = (TH1F*)gPad->GetPrimitive("inputData");
-  treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>gaus(71,-1.1,2.1)","","");
+  TCanvas* canvasGaus = new TCanvas("canvasGaus", "canvasGaus", 800, 800);
+  treeIn->Draw("val>>inputData(71,-1.1,2.1)", "", "");
+  TH1F* inputData = (TH1F*)gPad->GetPrimitive("inputData");
+  treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>gaus(71,-1.1,2.1)", "", "");
   // treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(3,xyz0,xyz1)-o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))/sqrt(o2::nd_regression::NDRegression::GetCorrNDError(3,xyz0,xyz1)**2+o2::nd_regression::NDRegression::GetCorrNDError(2,xyz0,xyz1)**2)>>ideal(401,-20.5,20.5)","","");
-  TH1F *gaus = (TH1F*)gPad->GetPrimitive("gaus");
+  TH1F* gaus = (TH1F*)gPad->GetPrimitive("gaus");
 
   Double_t meanGaus = treeIn->GetHistogram()->GetMean();
   Double_t meanGausErr = treeIn->GetHistogram()->GetMeanError();
   Double_t rmsGaus = treeIn->GetHistogram()->GetRMS();
   Double_t rmsGausErr = treeIn->GetHistogram()->GetRMSError();
-  if (TMath::Abs(meanGaus) <10*meanGausErr) {
-    ::Info( "NDRegression2DGausTest","mean pull OK %3.3f\t+-%3.3f", meanGaus, meanGausErr);
-  }else{
-    ::Error( "NDRegressionTest","mean pull NOT OK %3.3f\t+-%3.3f", meanGaus, meanGausErr);
+  if (TMath::Abs(meanGaus) < 10 * meanGausErr) {
+    ::Info("NDRegression2DGausTest", "mean pull OK %3.3f\t+-%3.3f", meanGaus, meanGausErr);
+  } else {
+    ::Error("NDRegressionTest", "mean pull NOT OK %3.3f\t+-%3.3f", meanGaus, meanGausErr);
   }
-  if (rmsGaus<1+rmsGausErr) {
-    ::Info( "NDRegressionTest"," rms pull OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr);
-  }else{
-    ::Error( "NDRegressionTest"," rms pull NOT OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr);
+  if (rmsGaus < 1 + rmsGausErr) {
+    ::Info("NDRegressionTest", " rms pull OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr);
+  } else {
+    ::Error("NDRegressionTest", " rms pull NOT OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr);
   }
-
-
-
+
   inputData->Draw("same");
-  PlotData(gaus,"Gaus","counts (arb. units)",kRed+2,"zTitle",rmsGaus,rmsGausErr,meanGaus,meanGausErr);
+  PlotData(gaus, "Gaus", "counts (arb. units)", kRed + 2, "zTitle", rmsGaus, rmsGausErr, meanGaus, meanGausErr);
 
   canvasGaus->SaveAs("NDRegressionTest.canvasGausTest.png");
   inpf.Close();
diff --git a/Common/NDRegression/test/NDRegression2DIdeal.cxx b/Common/NDRegression/test/NDRegression2DIdeal.cxx
index 54eaf0aec2731..839e768e40849 100644
--- a/Common/NDRegression/test/NDRegression2DIdeal.cxx
+++ b/Common/NDRegression/test/NDRegression2DIdeal.cxx
@@ -100,7 +100,6 @@ void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle",
   }
 }
-
 BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test)
 {
   auto pfitNDIdeal = make_unique<NDRegression>("pfitNDIdeal", "pfitNDIdeal");
@@ -168,7 +167,6 @@ BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test)
   pfitNDIdeal->SetStreamer(pcstreamOutIdeal);
   pfitNDIdeal2->SetStreamer(pcstreamOutIdeal2);
-
   success = pfitNDIdeal->SetHistogram((THn*)((hN->Clone())));
   BOOST_CHECK(success);
   success = pfitNDIdeal2->SetHistogram((THn*)((hN->Clone())));
@@ -176,55 +174,53 @@ BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test)
   success = pfitNDIdeal->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001);
   BOOST_CHECK(success);
-  success = pfitNDIdeal2->MakeFit(treeIn, "val:err", "xyz0:xyz1","Entry$%2==1", "0.08:0.08","2:2",0.0001); // sample Gaussian1
+  success = pfitNDIdeal2->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.08:0.08", "2:2", 0.0001); // sample Gaussian1
   BOOST_CHECK(success);
 
   std::cout << "Now drawing...\n";
-  pfitNDIdeal->AddVisualCorrection(pfitNDIdeal.get(),1);
-  pfitNDIdeal2->AddVisualCorrection(pfitNDIdeal2.get(),2);
+  pfitNDIdeal->AddVisualCorrection(pfitNDIdeal.get(), 1);
+  pfitNDIdeal2->AddVisualCorrection(pfitNDIdeal2.get(), 2);
 
-  TObjArray * array = NDRegression::GetVisualCorrections();
-  for (Int_t i=0; i<array->GetEntries(); i++){
-    NDRegression * regression = ( NDRegression *)array->At(i);
-    if (regression==NULL) continue;
+  TObjArray* array = NDRegression::GetVisualCorrections();
+  for (Int_t i = 0; i < array->GetEntries(); i++) {
+    NDRegression* regression = (NDRegression*)array->At(i);
+    if (regression == NULL)
+      continue;
    regression->AddVisualCorrection(regression);
    Int_t hashIndex = regression->GetVisualCorrectionIndex();
-    treeIn->SetAlias( regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)",hashIndex).Data());
+    treeIn->SetAlias(regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)", hashIndex).Data());
   }
 
   pcstreamOutIdeal.reset();
   pcstreamOutIdeal2.reset();
 
   std::cout << "Entries: " << array->GetEntries() << std::endl;
-
-  TCanvas * canvasIdeal = new TCanvas("canvasIdeal","canvasIdeal",800,800);
-  treeIn->Draw("val>>inputData(71,-1.1,2.1)","","");
-  TH1F *inputData = (TH1F*)gPad->GetPrimitive("inputData");
-  treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>ideal(71,-1.1,2.1)","","");
+  TCanvas* canvasIdeal = new TCanvas("canvasIdeal", "canvasIdeal", 800, 800);
+  treeIn->Draw("val>>inputData(71,-1.1,2.1)", "", "");
+  TH1F* inputData = (TH1F*)gPad->GetPrimitive("inputData");
+  treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>ideal(71,-1.1,2.1)", "", "");
   // treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(3,xyz0,xyz1)-o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))/sqrt(o2::nd_regression::NDRegression::GetCorrNDError(3,xyz0,xyz1)**2+o2::nd_regression::NDRegression::GetCorrNDError(2,xyz0,xyz1)**2)>>ideal(401,-20.5,20.5)","","");
-  TH1F *ideal = (TH1F*)gPad->GetPrimitive("ideal");
+  TH1F* ideal = (TH1F*)gPad->GetPrimitive("ideal");
 
   Double_t meanIdeal = treeIn->GetHistogram()->GetMean();
   Double_t meanIdealErr = treeIn->GetHistogram()->GetMeanError();
   Double_t rmsIdeal = treeIn->GetHistogram()->GetRMS();
   Double_t rmsIdealErr = treeIn->GetHistogram()->GetRMSError();
-  if (TMath::Abs(meanIdeal) <10*meanIdealErr) {
-    ::Info( "NDRegression2DIdealTest","mean pull OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr);
-  }else{
-    ::Error( "NDRegressionTest","mean pull NOT OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr);
+  if (TMath::Abs(meanIdeal) < 10 * meanIdealErr) {
+    ::Info("NDRegression2DIdealTest", "mean pull OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr);
+  } else {
+    ::Error("NDRegressionTest", "mean pull NOT OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr);
  }
-  if (rmsIdeal<1+rmsIdealErr) {
-    ::Info( "NDRegressionTest"," rms pull OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr);
-  }else{
-    ::Error( "NDRegressionTest"," rms pull NOT OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr);
+  if (rmsIdeal < 1 + rmsIdealErr) {
+    ::Info("NDRegressionTest", " rms pull OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr);
+  } else {
+    ::Error("NDRegressionTest", " rms pull NOT OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr);
   }
-  treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))>>ideal2(71,-1.1,2.1)","","");
-  TH1F *ideal2 = (TH1F*)gPad->GetPrimitive("ideal2");
-
-
-
+  treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))>>ideal2(71,-1.1,2.1)", "", "");
+  TH1F* ideal2 = (TH1F*)gPad->GetPrimitive("ideal2");
+
   inputData->Draw("same");
-  PlotData(ideal,"Ideal","counts (arb. units)",kRed+2,"zTitle",rmsIdeal,rmsIdealErr,meanIdeal,meanIdealErr);
-  PlotData(ideal2,"Ideal","counts (arb. units)",kGreen+2,"zTitle");
+  PlotData(ideal, "Ideal", "counts (arb. units)", kRed + 2, "zTitle", rmsIdeal, rmsIdealErr, meanIdeal, meanIdealErr);
+  PlotData(ideal2, "Ideal", "counts (arb. units)", kGreen + 2, "zTitle");
 
   canvasIdeal->SaveAs("NDRegressionTest.canvasIdealTest.png");
   inpf.Close();