diff --git a/Common/CMakeLists.txt b/Common/CMakeLists.txt index bfb4a24ff3bc9..d4fa14d96a5fa 100644 --- a/Common/CMakeLists.txt +++ b/Common/CMakeLists.txt @@ -15,5 +15,6 @@ add_subdirectory(Field) add_subdirectory(Types) add_subdirectory(Utils) add_subdirectory(SimConfig) +add_subdirectory(NDRegression) o2_data_file(COPY maps DESTINATION Common) diff --git a/Common/NDRegression/CMakeLists.txt b/Common/NDRegression/CMakeLists.txt new file mode 100644 index 0000000000000..4d7beee13aa97 --- /dev/null +++ b/Common/NDRegression/CMakeLists.txt @@ -0,0 +1,45 @@ +# Copyright 2019-2020 CERN and copyright holders of ALICE O2. +# See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +# All rights not expressly granted are reserved. +# +# This software is distributed under the terms of the GNU General Public +# License v3 (GPL Version 3), copied verbatim in the file "COPYING". +# +# In applying this license CERN does not waive the privileges and immunities +# granted to it by virtue of its status as an Intergovernmental Organization +# or submit itself to any jurisdiction. 
+ +o2_add_library( + NDRegression + SOURCES src/NDRegression.cxx + PUBLIC_LINK_LIBRARIES + O2::MathUtils + O2::Headers + O2::CommonUtils + ROOT::Minuit + # ROOT::Hist + # FairRoot::Base + # O2::CommonConstants + # O2::GPUCommon + # ROOT::GenVector + # ROOT::Geom + # Vc::Vc + ) + +o2_target_root_dictionary( + NDRegression + HEADERS include/NDRegression/NDRegression.h + ) + +o2_add_test( + NDRegression2DIdeal + SOURCES test/NDRegression2DIdeal.cxx + COMPONENT_NAME NDRegression + PUBLIC_LINK_LIBRARIES O2::NDRegression + LABELS init tfile ttree histo makefit) +o2_add_test( + NDRegression2DGaus + SOURCES test/NDRegression2DGaus.cxx + COMPONENT_NAME NDRegression + PUBLIC_LINK_LIBRARIES O2::NDRegression + LABELS init tfile ttree histo makefit) diff --git a/Common/NDRegression/include/NDRegression/NDRegression.h b/Common/NDRegression/include/NDRegression/NDRegression.h new file mode 100644 index 0000000000000..fd5a1e4148ba8 --- /dev/null +++ b/Common/NDRegression/include/NDRegression/NDRegression.h @@ -0,0 +1,141 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. 
+ +/// \file NDRegression.h +/// \author Gabor Biro, biro.gabor@wigner.hu + +/* + Related task: https://alice.its.cern.ch/jira/browse/ATO-569 + + The following incomplete implementation is largely based on the original + AliNDLocalRegression class, described by the following task: + https://alice.its.cern.ch/jira/browse/ATO-193 +*/ + +#ifndef ALICEO2_NDREGRESSION_H +#define ALICEO2_NDREGRESSION_H + +#include "TNamed.h" +#include "TMath.h" +#include "TFormula.h" +#include "TObjString.h" +#include "TString.h" +#include "TLinearFitter.h" +#include "TMinuit.h" +#include "TVectorD.h" +#include "THn.h" +#include "TH1.h" +#include "Rtypes.h" +#include +#include "CommonUtils/TreeStream.h" +#include "CommonUtils/TreeStreamRedirector.h" +#include "TStyle.h" +#include "TPaveText.h" +#include "TCanvas.h" + +class TreeStreamRedirector; +class THn; + +namespace o2 +{ + +using std::make_shared; +using std::make_unique; +using std::shared_ptr; +using std::unique_ptr; +using utils::TreeStreamRedirector; + +namespace nd_regression +{ + +class NDRegression : public TNamed +{ + public: + NDRegression() = default; + NDRegression(const char* name, const char* title); + ~NDRegression() = default; + // NDRegression(); + + bool init(); + void SetCuts(Double_t nSigma = 6, Double_t robustFraction = 0.95, Int_t estimator = 1); + + void SetStreamer(shared_ptr& streamer) { fStreamer = streamer; } + void EvaluateUni(const Int_t nvectors, const Double_t* data, Double_t& mean, Double_t& sigma, const Int_t hSub); + Bool_t SetHistogram(THn* histo); + + Bool_t MakeFit(TTree* tree, const char* formulaVal, const char* formulaVar, const char* selection, const char* formulaKernel, const char* dimensionFormula, Double_t weightCut = 1e-5, Int_t entries = 1e9, Bool_t useBinNorm = kTRUE); + Bool_t MakeRobustStatistic(TVectorD& values, TVectorD& errors, TObjArray& pointArray, TObjArray& kernelArrayI2, Int_t& nvarFormula, Double_t weightCut, Double_t robustFraction); + + // Bool_t MakeFit(TTree * tree , const 
char *formulaVal, const char * formulaVar, const char*selection, const char * formulaKernel, const char * dimensionFormula, Double_t weightCut=0.00001, Int_t entries=1000000000, Bool_t useBinNorm=kTRUE); + + static Double_t GetCorrND(Double_t index, Double_t par0); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2); + static Double_t GetCorrND(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); + static Double_t GetCorrNDError(Double_t index, Double_t par0); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2); + static Double_t GetCorrNDError(Double_t index, Double_t par0, Double_t par1, Double_t par2, Double_t par3); + Double_t Eval(Double_t* point); + Double_t EvalError(Double_t* point); + + // function to access the Local Regression from the TFormula + static void AddVisualCorrection(NDRegression* corr, + Int_t position = 0); + static Int_t GetVisualCorrectionIndex(const char* corName); + Int_t GetVisualCorrectionIndex() + { + return GetVisualCorrectionIndex(GetName()); + } + static NDRegression* GetVisualCorrection(Int_t position); + static NDRegression* GetVisualCorrection(const char* corName) + { + return (fgVisualCorrection == NULL) + ? 0 + : (NDRegression*)fgVisualCorrection->FindObject( + corName); + } + static TObjArray* GetVisualCorrections() { return fgVisualCorrection; } + + protected: + shared_ptr fStreamer; // ! 
streamer to keep - test intermediate data + THn* fHistPoints; // histogram local point distoribution + + TObjArray* fLocalFitParam; // local fit parameters + RMS + chi2 + TObjArray* fLocalFitQuality; // local fit npoints chi2 + TObjArray* fLocalFitCovar; // local fit covariance matrix + + TMatrixD* fLocalRobustStat; // local robust statistic + + Int_t fNParameters; // number of local parameters to fit + Int_t* fBinIndex; //[fNParameters] working arrays current bin index + Double_t* fBinCenter; //[fNParameters] working current local variables - bin center + Double_t* fBinDelta; //[fNParameters] working current local variables - bin delta + Double_t* fBinWidth; //[fNParameters] working current local variables - bin delta + + Int_t fCutType; // type of the cut 0- no cut 1-cut localmean=median,2-cut localmen=rosbut mean + + Double_t fRobustFractionLTS; // fraction of data used for the robust mean and robust rms estimator (LTS https://en.wikipedia.org/wiki/Least_trimmed_squares) + Double_t fRobustRMSLTSCut; // cut on the robust RMS |value-localmean| +#include "NDRegression/NDRegression.h" + +using namespace o2::nd_regression; + +ClassImp(o2::nd_regression::NDRegression); + +TObjArray* NDRegression::fgVisualCorrection = 0; + +NDRegression::NDRegression(const char* name, const char* title) : TNamed(name, title) +{ + if (!fgVisualCorrection) + fgVisualCorrection = new TObjArray; +} + +Bool_t NDRegression::init() +{ + fHistPoints = NULL; + fLocalRobustStat = NULL; + + fRobustRMSLTSCut = 0.0; + fRobustFractionLTS = 0.0; + fCutType = 0; + + std::cout << "Init done" << std::endl; + return true; +} + +void NDRegression::SetCuts(Double_t nSigma, Double_t robustFraction, + Int_t estimator) +{ + // + // + // + fRobustFractionLTS = + robustFraction; // fraction of data used for the robust mean and robust + // rms estimator (LTS + // https://en.wikipedia.org/wiki/Least_trimmed_squares) + fRobustRMSLTSCut = nSigma; // cut on the robust RMS + // |value-localmean|GetNbins()); + 
fLocalFitParam->SetOwner(kTRUE); + fLocalFitQuality = new TObjArray(fHistPoints->GetNbins()); + fLocalFitQuality->SetOwner(kTRUE); + fLocalFitCovar = new TObjArray(fHistPoints->GetNbins()); + fLocalFitCovar->SetOwner(kTRUE); + + // + // Check histogram + // + Int_t ndim = histo->GetNdimensions(); + for (Int_t idim = 0; idim < ndim; idim++) { + TAxis* axis = histo->GetAxis(idim); + if (axis->GetNbins() < 2) { + ::Error("NDRegression::SetHistogram", "Invalid binning nbins<2 %d", axis->GetNbins()); + return false; + } + if (axis->GetXmin() >= axis->GetXmax()) { + ::Error("NDRegression::SetHistogram", "Invalid range <%f,%f", axis->GetXmin(), axis->GetXmax()); + return false; + } + } + + return true; +} + +//_____________________________________________________________________________ +void NDRegression::EvaluateUni(const Int_t nvectors, const Double_t* data, Double_t& mean, Double_t& sigma, const Int_t hSub) +{ + // + // Robust estimator in 1D case MI version - (faster than ROOT version) + // + // For the univariate case + // estimates of location and scatter are returned in mean and sigma parameters + // the algorithm works on the same principle as in multivariate case - + // it finds a subset of size hSub with smallest sigma, and then returns mean and + // sigma of this subset + // + + Int_t hh = hSub; + if (nvectors < 2) { + ::Error("AliMathBase::EvaluateUni", "%s", Form("nvectors = %d, should be > 1", nvectors)); + return; + } + if (hh == nvectors) { + mean = TMath::Mean(nvectors, data); + sigma = TMath::RMS(nvectors, data); + return; + } + if (hh < 2) + hh = (nvectors + 2) / 2; + Double_t faclts[] = {2.6477, 2.5092, 2.3826, 2.2662, 2.1587, 2.0589, 1.9660, 1.879, 1.7973, 1.7203, 1.6473}; + Int_t* index = new Int_t[nvectors]; + TMath::Sort(nvectors, data, index, kFALSE); + + Int_t nquant = TMath::Min(Int_t(Double_t(((hh * 1. 
/ nvectors) - 0.5) * 40)) + 1, 11); + Double_t factor = faclts[TMath::Max(0, nquant - 1)]; + + Double_t sumx = 0; + Double_t sumx2 = 0; + Double_t bestmean = 0; + Double_t bestsigma = (data[index[nvectors - 1]] - data[index[0]] + 1.); // maximal possible sigma + bestsigma *= bestsigma; + + for (Int_t i = 0; i < hh; i++) { + sumx += data[index[i]]; + sumx2 += data[index[i]] * data[index[i]]; + } + + Double_t norm = 1. / Double_t(hh); + Double_t norm2 = 1. / Double_t(hh - 1); + for (Int_t i = hh; i < nvectors; i++) { + Double_t cmean = sumx * norm; + Double_t csigma = (sumx2 - hh * cmean * cmean) * norm2; + if (csigma < bestsigma) { + bestmean = cmean; + bestsigma = csigma; + } + + sumx += data[index[i]] - data[index[i - hh]]; + sumx2 += data[index[i]] * data[index[i]] - data[index[i - hh]] * data[index[i - hh]]; + } + + Double_t bstd = factor * TMath::Sqrt(TMath::Abs(bestsigma)); + mean = bestmean; + sigma = bstd; + delete[] index; +} + +Bool_t NDRegression::MakeRobustStatistic(TVectorD& values, TVectorD& errors, TObjArray& pointArray, TObjArray& kernelArrayI2, Int_t& nvarFormula, Double_t weightCut, Double_t robustFraction) +{ + // + // Calculate robust statistic information + // use raw array to make faster calcualtion + const Int_t kMaxDim = 100; + Double_t* pvalues = values.GetMatrixArray(); + Double_t* pvecVar[kMaxDim] = {0}; + Double_t* pvecKernelI2[kMaxDim] = {0}; + for (Int_t idim = 0; idim < pointArray.GetEntries(); idim++) { + pvecVar[idim] = ((TVectorD*)(pointArray.At(idim)))->GetMatrixArray(); + pvecKernelI2[idim] = ((TVectorD*)(kernelArrayI2.At(idim)))->GetMatrixArray(); + } + + Double_t nchi2Cut = -2 * TMath::Log(weightCut); // transform probability to nsigma cut + if (robustFraction > 1) + robustFraction = 1; + + Int_t nbins = fHistPoints->GetNbins(); // + Int_t npoints = values.GetNrows(); // number of points for fit + if (fLocalRobustStat) { + delete fLocalRobustStat; + } + fLocalRobustStat = new TMatrixD(nbins, 3); + + TVectorD valueLocal(npoints); 
+ for (Int_t ibin = 0; ibin < nbins; ibin++) { + fHistPoints->GetBinContent(ibin, fBinIndex); // + for (Int_t idim = 0; idim < nvarFormula; idim++) { + fBinCenter[idim] = fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]); + } + Int_t indexLocal = 0; + for (Int_t ipoint = 0; ipoint < npoints; ipoint++) { + Double_t sumChi2 = 0; + for (Int_t idim = 0; idim < nvarFormula; idim++) { + //TVectorD &vecVar=*((TVectorD*)(pointArray.UncheckedAt(idim))); + //TVectorD &vecKernel=*((TVectorD*)(kernelArray.UncheckedAt(idim))); + fBinDelta[idim] = pvecVar[idim][ipoint] - fBinCenter[idim]; + sumChi2 += (fBinDelta[idim] * fBinDelta[idim]) * pvecKernelI2[idim][ipoint]; + if (sumChi2 > nchi2Cut) + break; //continue; + } + if (sumChi2 > nchi2Cut) + continue; + valueLocal[indexLocal] = pvalues[ipoint]; + indexLocal++; + } + Double_t median = 0, meanX = 0, rmsX = 0; + if (indexLocal * robustFraction - 1 > 3) { + median = TMath::Median(indexLocal, valueLocal.GetMatrixArray()); + EvaluateUni(indexLocal, valueLocal.GetMatrixArray(), meanX, rmsX, indexLocal * robustFraction - 1); + } + (*fLocalRobustStat)(ibin, 0) = median; + (*fLocalRobustStat)(ibin, 1) = meanX; + (*fLocalRobustStat)(ibin, 2) = rmsX; + } + return true; +} + +Bool_t NDRegression::MakeFit(TTree* tree, const char* formulaVal, const char* formulaVar, const char* selection, const char* formulaKernel, const char* dimensionFormula, Double_t weightCut, Int_t entries, Bool_t useBinNorm) +{ + // + // Make a local fit in grid as specified by the input THn histogram + // Histogram has to be set before invocation of method + // + // Output: + // array of fit parameters and covariance matrices + // + // Input Parameters: + // tree - input tree + // formulaVal - : separated variable:error string + // formulaVar - : separate varaible list + // selection - selection (cut) for TTreeDraw + // kernelWidth - : separated list of width of kernel for local 
fitting + // dimenstionFormula - dummy for the moment + // + //Algorithm: + // 1.) Check consistency of input data + // + // 2.) Cache input data from tree to the array of vector TVectorD + // + // 3.) Calculate robust local mean and robust local RMS in case outlier removal algorithm specified + // + // 4.) Make local fit + // + // const Double_t kEpsilon=1e-6; + const Int_t kMaxDim = 100; + Int_t binRange[kMaxDim] = {0}; + Double_t nchi2Cut = -2 * TMath::Log(weightCut); // transform probability to nsigma cut + if (fHistPoints == NULL) { + ::Error("NDRegression::MakeFit", "ND histogram not initialized\n"); + return kFALSE; + } + if (tree == NULL || tree->GetEntries() == 0) { + ::Error("NDRegression::MakeFit", "Empty tree\n"); + return kFALSE; + } + if (formulaVar == NULL || formulaVar == 0) { + ::Error("NDRegression::MakeFit", "Empty variable list\n"); + return kFALSE; + } + if (formulaKernel == NULL) { + ::Error("NDRegression::MakeFit", "Kernel width not specified\n"); + return kFALSE; + } + fUseBinNorm = useBinNorm; + // + // fInputTree = tree; // should be better TRef? 
+ // fFormulaVal = new TObjString(formulaVal); + // fFormulaVar = new TObjString(formulaVar); + // fSelection = new TObjString(selection); + // fKernelWidthFormula = new TObjString(formulaKernel); + // fPolDimensionFormula = new TObjString(dimensionFormula); + auto arrayFormulaVar = TString(formulaVar).Tokenize(":"); + Int_t nvarFormula = arrayFormulaVar->GetEntries(); + if (nvarFormula != fHistPoints->GetNdimensions()) { + ::Error("NDRegression::MakeFit", "Histogram/points mismatch\n"); + return kFALSE; + } + + // for (int i = 0; i < nvarFormula; i++) { + // std::cout << ((TString*)(arrayFormulaVar->At(i)))->View() << "\n"; + // } + + TObjArray* arrayKernel = TString(formulaKernel).Tokenize(":"); + Int_t nwidthFormula = arrayKernel->GetEntries(); + if (nvarFormula != nwidthFormula) { + delete arrayKernel; + delete arrayFormulaVar; + ::Error("NDRegression::MakeFit", "Variable/Kernel mismath\n"); + return kFALSE; + } + fNParameters = nvarFormula; + + // + // 2.) Load input data + // + // + Int_t entriesVal = tree->Draw(formulaVal, selection, "goffpara", entries); + if (entriesVal == 0) { + ::Error("NDRegression::MakeFit", "Empty point list\t%s\t%s\n", formulaVal, selection); + return kFALSE; + } + if (tree->GetVal(0) == NULL || (tree->GetVal(1) == NULL)) { + ::Error("NDRegression::MakeFit", "Wrong selection\t%s\t%s\n", formulaVar, selection); + return kFALSE; + } + TVectorD values(entriesVal, tree->GetVal(0)); + TVectorD errors(entriesVal, tree->GetVal(1)); + Double_t* pvalues = values.GetMatrixArray(); + Double_t* perrors = errors.GetMatrixArray(); + + // std::cout << std::endl; + // values.Print(); + // std::cout << std::endl; + + // 2.b) variables + TObjArray pointArray(nvarFormula); + Int_t entriesVar = tree->Draw(formulaVar, selection, "goffpara", entries); + if (entriesVal != entriesVar) { + ::Error("NDRegression::MakeFit", "Wrong selection\t%s\t%s\n", formulaVar, selection); + return kFALSE; + } + for (Int_t ipar = 0; ipar < nvarFormula; ipar++) + 
pointArray.AddAt(new TVectorD(entriesVar, tree->GetVal(ipar)), ipar); + + // 2.c) kernel array + TObjArray kernelArrayI2(nvarFormula); + tree->Draw(formulaKernel, selection, "goffpara", entries); + for (Int_t ipar = 0; ipar < nvarFormula; ipar++) { + TVectorD* vdI2 = new TVectorD(entriesVar, tree->GetVal(ipar)); + for (int k = entriesVar; k--;) { // to speed up, precalculate inverse squared + double kv = (*vdI2)[k]; + if (TMath::Abs(kv) < 1e-12) + ::Fatal("NDRegression::MakeFit", "Kernel width=%f for entry %d of par:%d", kv, k, ipar); + (*vdI2)[k] = 1. / (kv * kv); + } + kernelArrayI2.AddAt(vdI2, ipar); + } + // + Double_t* pvecVar[kMaxDim] = {0}; + Double_t* pvecKernelI2[kMaxDim] = {0}; + for (Int_t idim = 0; idim < pointArray.GetEntries(); idim++) { + pvecVar[idim] = ((TVectorD*)(pointArray.At(idim)))->GetMatrixArray(); + pvecKernelI2[idim] = ((TVectorD*)(kernelArrayI2.At(idim)))->GetMatrixArray(); + binRange[idim] = fHistPoints->GetAxis(idim)->GetNbins(); + } + // + // + // + Int_t nbins = fHistPoints->GetNbins(); + fBinIndex = new Int_t[fHistPoints->GetNdimensions()]; + fBinCenter = new Double_t[fHistPoints->GetNdimensions()]; + fBinDelta = new Double_t[fHistPoints->GetNdimensions()]; + fBinWidth = new Double_t[fHistPoints->GetNdimensions()]; + + // + // 3.) + // + if (fCutType > 0 && fRobustRMSLTSCut > 0) { + MakeRobustStatistic(values, errors, pointArray, kernelArrayI2, nvarFormula, weightCut, fRobustFractionLTS); + } + // + + // 4.) 
Make local fits + // + + Double_t* binHypFit = new Double_t[2 * fHistPoints->GetNdimensions()]; + // + TLinearFitter fitter(1 + 2 * nvarFormula, TString::Format("hyp%d", 2 * nvarFormula).Data()); + for (Int_t ibin = 0; ibin < nbins; ibin++) { + fHistPoints->GetBinContent(ibin, fBinIndex); + Bool_t isUnderFlowBin = kFALSE; + Bool_t isOverFlowBin = kFALSE; + for (Int_t idim = 0; idim < nvarFormula; idim++) { + if (fBinIndex[idim] == 0) + isUnderFlowBin = kTRUE; + if (fBinIndex[idim] > binRange[idim]) + isOverFlowBin = kTRUE; + fBinCenter[idim] = fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]); + } + if (isUnderFlowBin || isOverFlowBin) + continue; + fitter.ClearPoints(); + // add fit points + for (Int_t ipoint = 0; ipoint < entriesVal; ipoint++) { + Double_t sumChi2 = 0; + if (fCutType > 0 && fRobustRMSLTSCut > 0) { + Double_t localRMS = (*fLocalRobustStat)(ibin, 2); + Double_t localMean = (*fLocalRobustStat)(ibin, 1); + Double_t localMedian = (*fLocalRobustStat)(ibin, 0); + if (fCutType == 1) { + if (TMath::Abs(pvalues[ipoint] - localMedian) > fRobustRMSLTSCut * localRMS) + continue; + } + if (fCutType == 2) { + if (TMath::Abs(pvalues[ipoint] - localMean) > fRobustRMSLTSCut * localRMS) + continue; + } + } + for (Int_t idim = 0; idim < nvarFormula; idim++) { + //TVectorD &vecVar=*((TVectorD*)(pointArray.UncheckedAt(idim))); + //TVectorD &vecKernel=*((TVectorD*)(kernelArray.UncheckedAt(idim))); + fBinDelta[idim] = pvecVar[idim][ipoint] - fBinCenter[idim]; + sumChi2 += (fBinDelta[idim] * fBinDelta[idim]) * pvecKernelI2[idim][ipoint]; + if (sumChi2 > nchi2Cut) + break; //continue; + if (useBinNorm) { + binHypFit[2 * idim] = fBinDelta[idim] / fBinWidth[idim]; + binHypFit[2 * idim + 1] = binHypFit[2 * idim] * binHypFit[2 * idim]; + } else { + binHypFit[2 * idim] = fBinDelta[idim]; + binHypFit[2 * idim + 1] = fBinDelta[idim] * fBinDelta[idim]; + } + } + if (sumChi2 > nchi2Cut) + 
continue; + // Double_t weight=TMath::Exp(-sumChi2*0.5); + // fitter.AddPoint(binHypFit,pvalues[ipoint], perrors[ipoint]/weight); + Double_t weightI = TMath::Exp(sumChi2 * 0.5); + fitter.AddPoint(binHypFit, pvalues[ipoint], perrors[ipoint] * weightI); + } + TVectorD* fitParam = new TVectorD(nvarFormula * 2 + 1); + TVectorD* fitQuality = new TVectorD(3); + TMatrixD* fitCovar = new TMatrixD(nvarFormula * 2 + 1, nvarFormula * 2 + 1); + Double_t normRMS = 0; + Int_t nBinPoints = fitter.GetNpoints(); + Bool_t fitOK = kFALSE; + (*fitQuality)[0] = 0; + (*fitQuality)[1] = 0; + (*fitQuality)[2] = 0; + + if (fitter.GetNpoints() > nvarFormula * 2 + 2) { + + if (fCutType == 3) + fitOK = (fitter.EvalRobust() == 0); + else + fitOK = (fitter.Eval() == 0); + + if (fitOK) { + normRMS = fitter.GetChisquare() / (fitter.GetNpoints() - fitter.GetNumberFreeParameters()); + fitter.GetParameters(*fitParam); + fitter.GetCovarianceMatrix(*fitCovar); + (*fitQuality)[0] = nBinPoints; + (*fitQuality)[1] = normRMS; + (*fitQuality)[2] = ibin; + fLocalFitParam->AddAt(fitParam, ibin); + fLocalFitQuality->AddAt(fitQuality, ibin); + fLocalFitCovar->AddAt(fitCovar, ibin); + } + } + if (fStreamer) { + TVectorD pfBinCenter(nvarFormula, fBinCenter); + Double_t median = 0, mean = 0, rms = 0; + if (fLocalRobustStat) { + median = (*fLocalRobustStat)(ibin, 0); + mean = (*fLocalRobustStat)(ibin, 1); + rms = (*fLocalRobustStat)(ibin, 2); + } + (*fStreamer) << "localFit" + << "ibin=" << ibin << // bin index + "fitOK=" << fitOK << "localMedian=" << median << "localMean=" << mean << "localRMS=" << rms << "nBinPoints=" << nBinPoints << // center of the bin + "binCenter.=" << &pfBinCenter << // + "normRMS=" << normRMS << "fitParam.=" << fitParam << "fitCovar.=" << fitCovar << "fitOK=" << fitOK << "\n"; + } + if (!fitOK) { // avoid memory leak for failed fits + delete fitParam; + delete fitQuality; + delete fitCovar; + } + } + + return kTRUE; +} + +Int_t NDRegression::GetVisualCorrectionIndex(const char* corName) 
+{ + // + return TMath::Hash(corName) % 1000000; /// BUGGY hash in the TMath + // return TString(corName).Hash()%1000000; /// also buggy - even more clashes +} + +void NDRegression::AddVisualCorrection(NDRegression* corr, + Int_t position) +{ + /// make correction available for visualization using + /// TFormula, TFX and TTree::Draw + /// important in order to check corrections and also compute dervied variables + /// e.g correction partial derivatives + /// + /// NOTE - class is not owner of correction + if (position == 0) { + position = GetVisualCorrectionIndex(corr->GetName()); + } + + if (!fgVisualCorrection) + fgVisualCorrection = new TObjArray(1000000); + if (position >= fgVisualCorrection->GetEntriesFast()) + fgVisualCorrection->Expand((position + 10) * 2); + if (fgVisualCorrection->At(position) != NULL) { + ::Error("NDRegression::AddVisualCorrection", + "Correction %d already defined Old: %s New: %s", position, + fgVisualCorrection->At(position)->GetName(), corr->GetName()); + } + fgVisualCorrection->AddAt(corr, position); +} + +NDRegression* + NDRegression::GetVisualCorrection(Int_t position) +{ + /// Get visula correction registered at index=position + return fgVisualCorrection + ? 
(NDRegression*)fgVisualCorrection->At(position) + : 0; +} + +Double_t NDRegression::GetCorrND(Double_t index, Double_t par0) +{ + // + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + return corr->Eval(&par0); +} + +Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0) +{ + // + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + return corr->EvalError(&par0); +} + +Double_t NDRegression::GetCorrND(Double_t index, Double_t par0, + Double_t par1) +{ + // + + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[2] = {par0, par1}; + return corr->Eval(par); +} +Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0, + Double_t par1) +{ + // + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[2] = {par0, par1}; + return corr->EvalError(par); +} + +Double_t NDRegression::GetCorrND(Double_t index, Double_t par0, + Double_t par1, Double_t par2) +{ + // + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[3] = {par0, par1, par2}; + return corr->Eval(par); +} + +Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0, + Double_t par1, Double_t par2) +{ + // + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[3] = {par0, par1, par2}; + return corr->EvalError(par); +} + +Double_t NDRegression::GetCorrND(Double_t index, Double_t par0, + Double_t par1, Double_t par2, + Double_t par3) +{ + // + // + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[4] = {par0, par1, par2, par3}; + return corr->Eval(par); +} + +Double_t NDRegression::GetCorrNDError(Double_t index, Double_t par0, + Double_t par1, Double_t par2, + Double_t par3) +{ + // + 
// + NDRegression* corr = + (NDRegression*)fgVisualCorrection->At(index); + if (!corr) + return 0; + Double_t par[4] = {par0, par1, par2, par3}; + return corr->EvalError(par); +} + +Double_t NDRegression::Eval(Double_t* point) +{ + // + // + // + const Double_t almost0 = 0.00000001; + // backward compatibility + if (!fBinWidth) { + fBinWidth = new Double_t[fHistPoints->GetNdimensions()]; + } + + for (Int_t iDim = 0; iDim < fNParameters; iDim++) { + if (point[iDim] <= fHistPoints->GetAxis(iDim)->GetXmin()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmin() + + almost0 * fHistPoints->GetAxis(iDim)->GetBinWidth(0); + if (point[iDim] >= fHistPoints->GetAxis(iDim)->GetXmax()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmax() - + almost0 * fHistPoints->GetAxis(iDim)->GetBinWidth(0); + } + + Int_t ibin = fHistPoints->GetBin(point); + Bool_t rangeOK = kTRUE; + if (ibin >= fLocalFitParam->GetEntriesFast()) { + rangeOK = kFALSE; + } else { + if (fLocalFitParam->UncheckedAt(ibin) == NULL) { + rangeOK = kFALSE; + } + } + if (!rangeOK) + return 0; + + fHistPoints->GetBinContent(ibin, fBinIndex); + for (Int_t idim = 0; idim < fNParameters; idim++) { + fBinCenter[idim] = + fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + fBinWidth[idim] = fHistPoints->GetAxis(idim)->GetBinWidth(fBinIndex[idim]); + } + TVectorD& vecParam = *((TVectorD*)fLocalFitParam->At(ibin)); + Double_t value = vecParam[0]; + if (!rangeOK) + return value; + if (fUseBinNorm) { + for (Int_t ipar = 0; ipar < fNParameters; ipar++) { + Double_t delta = (point[ipar] - fBinCenter[ipar]) / fBinWidth[ipar]; + value += + (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta; + } + } else { + for (Int_t ipar = 0; ipar < fNParameters; ipar++) { + Double_t delta = (point[ipar] - fBinCenter[ipar]); + value += + (vecParam[1 + 2 * ipar] + vecParam[1 + 2 * ipar + 1] * delta) * delta; + } + } + return value; +} + +Double_t NDRegression::EvalError(Double_t* point) +{ + // + // + // + if 
(fLocalFitCovar == NULL) { + ::Error("NDRegression::EvalError", + "Covariance matrix not available"); + return 0; + } + for (Int_t iDim = 0; iDim < fNParameters; iDim++) { + if (point[iDim] < fHistPoints->GetAxis(iDim)->GetXmin()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmin(); + if (point[iDim] > fHistPoints->GetAxis(iDim)->GetXmax()) + point[iDim] = fHistPoints->GetAxis(iDim)->GetXmax(); + } + + Int_t ibin = fHistPoints->GetBin(point); + if (fLocalFitParam->At(ibin) == NULL) + return 0; + fHistPoints->GetBinContent(ibin, fBinIndex); + for (Int_t idim = 0; idim < fNParameters; idim++) { + fBinCenter[idim] = + fHistPoints->GetAxis(idim)->GetBinCenter(fBinIndex[idim]); + } + TMatrixD& vecCovar = *((TMatrixD*)fLocalFitCovar->At(ibin)); + // TVectorD &vecQuality = *((TVectorD*)fLocalFitQuality->At(ibin)); + Double_t value = TMath::Sqrt(vecCovar(0, 0)); // fill covariance to be used + return value; +} diff --git a/Common/NDRegression/src/NDRegressionLinkDef.h b/Common/NDRegression/src/NDRegressionLinkDef.h new file mode 100644 index 0000000000000..ee149996aca0e --- /dev/null +++ b/Common/NDRegression/src/NDRegressionLinkDef.h @@ -0,0 +1,19 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. 
+ +#ifdef __CLING__ +#pragma link off all globals; +#pragma link off all classes; +#pragma link off all functions; + +#pragma link C++ class o2::nd_regression::NDRegression + ; + +#endif diff --git a/Common/NDRegression/test/NDRegression2DGaus.cxx b/Common/NDRegression/test/NDRegression2DGaus.cxx new file mode 100644 index 0000000000000..539b580feb1dc --- /dev/null +++ b/Common/NDRegression/test/NDRegression2DGaus.cxx @@ -0,0 +1,213 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. +// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. + +#define BOOST_TEST_MODULE Test NDRegression2DGaus +#define BOOST_TEST_MAIN +#define BOOST_TEST_DYN_LINK + +#include +#include "NDRegression/NDRegression.h" +#include "TRandom.h" +#include "TFile.h" +#include "THn.h" + +// namespace o2::utils +// { +// class TreeStreamRedirector; +// } + +// using o2::utils::TreeStreamRedirector; +using o2::nd_regression::NDRegression; +using std::make_shared; +using std::make_unique; + +void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle", Color_t color = kBlack, TString zTitle = "zTitle", Double_t rms = 999999., Double_t eRms = 0., Double_t mean = 999999., Double_t eMean = 0.) 
+{ + // + // + // gStyle->SetPadRightMargin(0.05); + // gStyle->SetPadTopMargin(0.05); + // gStyle->SetPadLeftMargin(0.24); + // gStyle->SetPadBottomMargin(0.22); + // gStyle->SetPadTickX(1); + // gStyle->SetPadTickY(1); + // gStyle->SetPadGridX(1); + // gStyle->SetPadGridY(1); + // gStyle->SetOptStat(0); + gPad->SetRightMargin(0.05); + gPad->SetTopMargin(0.05); + gPad->SetLeftMargin(0.14); + gPad->SetBottomMargin(0.12); + gPad->SetTicks(1); + gPad->SetGrid(1); + gPad->SetObjectStat(0); + // + if (color == (kRed + 2)) { + hData->SetMarkerStyle(20); + } + if (color == (kBlue + 2)) { + hData->SetMarkerStyle(21); + } + if (color == (kGreen + 2)) { + hData->SetMarkerStyle(22); + } + hData->SetMarkerSize(0.7); + + hData->SetMarkerColor(color); + hData->SetLineColor(color); + hData->GetXaxis()->SetTitle(xTitle.Data()); + hData->GetYaxis()->SetTitle(yTitle.Data()); + hData->GetZaxis()->SetTitle(zTitle.Data()); + hData->GetXaxis()->SetTitleOffset(1.2); + hData->GetXaxis()->SetTitleSize(0.05); + hData->GetYaxis()->SetTitleOffset(1.3); + hData->GetYaxis()->SetTitleSize(0.05); + hData->GetXaxis()->SetLabelSize(0.035); + hData->GetYaxis()->SetLabelSize(0.035); + hData->GetXaxis()->SetDecimals(); + hData->GetYaxis()->SetDecimals(); + hData->GetZaxis()->SetDecimals(); + hData->Sumw2(); + hData->Draw("pe1 same"); + // hData->Draw("l same"); + + if (mean != 999999.) { + TPaveText* text1 = new TPaveText(0.21, 0.82, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("Mean: %0.2f #pm %0.2f", mean, eMean)); + text1->AddText(Form("RMS: %0.2f #pm %0.2f", rms, eRms)); + text1->Draw(); + } + if (rms != 999999. && mean == 999999.) 
{ + TPaveText* text1 = new TPaveText(0.21, 0.87, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("RMS: %0.2f", rms)); + text1->Draw(); + } +} + +BOOST_AUTO_TEST_CASE(NDRegression2DGaus_test) +{ + auto pfitNDGaus0 = make_unique<NDRegression>("pfitNDGaus0", "pfitNDGaus0"); + + auto success = pfitNDGaus0->init(); + BOOST_CHECK(success == true); + + // + // 0.) Initialization of variables and THn + // + Int_t ndim = 2; + Double_t err = 0.1; + + o2::utils::TreeStreamRedirector pcstreamIn("fitNDLocalTestInput.root", "recreate"); + auto pcstreamOutGaus0 = make_shared<o2::utils::TreeStreamRedirector>("fitNDLocalTestOutputGaus0.root", "recreate"); + + Double_t* xyz = new Double_t[ndim]; + Double_t* sxyz = new Double_t[ndim]; + Int_t* nbins = new Int_t[ndim]; + Double_t* xmin = new Double_t[ndim]; + Double_t* xmax = new Double_t[ndim]; + TString** chxyz = new TString*[ndim]; + for (Int_t idim = 0; idim < ndim; idim++) { + chxyz[idim] = new TString(TString::Format("xyz%d=", idim).Data()); + nbins[idim] = 70; + xmin[idim] = -1; + xmax[idim] = 2; + } + + auto pformula = make_shared<TFormula>("pformula", "cos(7*x[0])*cos(3*x[1])+pow(x[0], x[1])"); + auto hN = make_shared<THnF>("exampleFit", "exampleFit", ndim, nbins, xmin, xmax); + // + // 1.) 
generate random input points + // + for (Int_t ipoint = 0; ipoint < 1e4; ipoint++) { + for (Int_t idim = 0; idim < ndim; idim++) { + xyz[idim] = gRandom->Rndm(); + } + Double_t value = pformula->EvalPar(xyz, 0); + Double_t noise = gRandom->Gaus() * err; + Double_t noise2 = noise * (1 + (gRandom->Rndm() < 0.1) * 100); // noise with 10 percent of outliers + Double_t noiseBreit = gRandom->BreitWigner() * err; + pcstreamIn << "testInput" + << "val=" << value << "err=" << err << "noise=" << noise << // gausian noise + "noise2=" << noise2 << // gausian noise + 10% of outliers + "noiseBreit=" << noiseBreit; + for (Int_t idim = 0; idim < ndim; idim++) { + pcstreamIn << "testInput" << chxyz[idim]->Data() << xyz[idim]; + } + pcstreamIn << "testInput" + << "\n"; + } + pcstreamIn.Close(); + std::cout << "testfile done\n"; + + TFile inpf("fitNDLocalTestInput.root"); + BOOST_CHECK(!inpf.IsZombie()); + auto treeIn = (TTree*)(inpf.GetFile()->Get("testInput")); + BOOST_CHECK(treeIn); + + pfitNDGaus0->SetStreamer(pcstreamOutGaus0); + + success = pfitNDGaus0->SetHistogram((THn*)((hN->Clone()))); + BOOST_CHECK(success); + + success = pfitNDGaus0->MakeFit(treeIn, "val+noise:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001); // sample Gaussian1 + BOOST_CHECK(success); + + std::cout << "Now drawing...\n"; + + pfitNDGaus0->AddVisualCorrection(pfitNDGaus0.get(), 1); + + TObjArray* array = NDRegression::GetVisualCorrections(); + for (Int_t i = 0; i < array->GetEntries(); i++) { + NDRegression* regression = (NDRegression*)array->At(i); + if (regression == NULL) + continue; + regression->AddVisualCorrection(regression); + Int_t hashIndex = regression->GetVisualCorrectionIndex(); + treeIn->SetAlias(regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)", hashIndex).Data()); + } + pcstreamOutGaus0.reset(); + std::cout << "Entries: " << array->GetEntries() << std::endl; + + TCanvas* canvasGaus = new TCanvas("canvasGaus", "canvasGaus", 800, 
800); + treeIn->Draw("val>>inputData(71,-1.1,2.1)", "", ""); + TH1F* inputData = (TH1F*)gPad->GetPrimitive("inputData"); + treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>gaus(71,-1.1,2.1)", "", ""); + // treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(3,xyz0,xyz1)-o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))/sqrt(o2::nd_regression::NDRegression::GetCorrNDError(3,xyz0,xyz1)**2+o2::nd_regression::NDRegression::GetCorrNDError(2,xyz0,xyz1)**2)>>ideal(401,-20.5,20.5)","",""); + TH1F* gaus = (TH1F*)gPad->GetPrimitive("gaus"); + Double_t meanGaus = treeIn->GetHistogram()->GetMean(); + Double_t meanGausErr = treeIn->GetHistogram()->GetMeanError(); + Double_t rmsGaus = treeIn->GetHistogram()->GetRMS(); + Double_t rmsGausErr = treeIn->GetHistogram()->GetRMSError(); + if (TMath::Abs(meanGaus) < 10 * meanGausErr) { + ::Info("NDRegression2DGausTest", "mean pull OK %3.3f\t+-%3.3f", meanGaus, meanGausErr); + } else { + ::Error("NDRegressionTest", "mean pull NOT OK %3.3f\t+-%3.3f", meanGaus, meanGausErr); + } + if (rmsGaus < 1 + rmsGausErr) { + ::Info("NDRegressionTest", " rms pull OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr); + } else { + ::Error("NDRegressionTest", " rms pull NOT OK %3.3f\t+-%3.3f", rmsGaus, rmsGausErr); + } + + inputData->Draw("same"); + PlotData(gaus, "Gaus", "counts (arb. units)", kRed + 2, "zTitle", rmsGaus, rmsGausErr, meanGaus, meanGausErr); + canvasGaus->SaveAs("NDRegressionTest.canvasGausTest.png"); + + inpf.Close(); +} diff --git a/Common/NDRegression/test/NDRegression2DIdeal.cxx b/Common/NDRegression/test/NDRegression2DIdeal.cxx new file mode 100644 index 0000000000000..839e768e40849 --- /dev/null +++ b/Common/NDRegression/test/NDRegression2DIdeal.cxx @@ -0,0 +1,227 @@ +// Copyright 2019-2020 CERN and copyright holders of ALICE O2. +// See https://alice-o2.web.cern.ch/copyright for details of the copyright holders. +// All rights not expressly granted are reserved. 
+// +// This software is distributed under the terms of the GNU General Public +// License v3 (GPL Version 3), copied verbatim in the file "COPYING". +// +// In applying this license CERN does not waive the privileges and immunities +// granted to it by virtue of its status as an Intergovernmental Organization +// or submit itself to any jurisdiction. + +#define BOOST_TEST_MODULE Test NDRegression2DIdeal +#define BOOST_TEST_MAIN +#define BOOST_TEST_DYN_LINK + +#include <boost/test/unit_test.hpp> +#include "NDRegression/NDRegression.h" +#include "TRandom.h" +#include "TFile.h" +#include "THn.h" + +// namespace o2::utils +// { +// class TreeStreamRedirector; +// } + +// using o2::utils::TreeStreamRedirector; +using o2::nd_regression::NDRegression; +using std::make_shared; +using std::make_unique; + +void PlotData(TH1F* hData, TString xTitle = "xTitle", TString yTitle = "yTitle", Color_t color = kBlack, TString zTitle = "zTitle", Double_t rms = 999999., Double_t eRms = 0., Double_t mean = 999999., Double_t eMean = 0.) 
+{ + // + // + // gStyle->SetPadRightMargin(0.05); + // gStyle->SetPadTopMargin(0.05); + // gStyle->SetPadLeftMargin(0.24); + // gStyle->SetPadBottomMargin(0.22); + // gStyle->SetPadTickX(1); + // gStyle->SetPadTickY(1); + // gStyle->SetPadGridX(1); + // gStyle->SetPadGridY(1); + // gStyle->SetOptStat(0); + gPad->SetRightMargin(0.05); + gPad->SetTopMargin(0.05); + gPad->SetLeftMargin(0.14); + gPad->SetBottomMargin(0.12); + gPad->SetTicks(1); + gPad->SetGrid(1); + gPad->SetObjectStat(0); + // + if (color == (kRed + 2)) { + hData->SetMarkerStyle(20); + } + if (color == (kBlue + 2)) { + hData->SetMarkerStyle(21); + } + if (color == (kGreen + 2)) { + hData->SetMarkerStyle(22); + } + hData->SetMarkerSize(0.7); + + hData->SetMarkerColor(color); + hData->SetLineColor(color); + hData->GetXaxis()->SetTitle(xTitle.Data()); + hData->GetYaxis()->SetTitle(yTitle.Data()); + hData->GetZaxis()->SetTitle(zTitle.Data()); + hData->GetXaxis()->SetTitleOffset(1.2); + hData->GetXaxis()->SetTitleSize(0.05); + hData->GetYaxis()->SetTitleOffset(1.3); + hData->GetYaxis()->SetTitleSize(0.05); + hData->GetXaxis()->SetLabelSize(0.035); + hData->GetYaxis()->SetLabelSize(0.035); + hData->GetXaxis()->SetDecimals(); + hData->GetYaxis()->SetDecimals(); + hData->GetZaxis()->SetDecimals(); + hData->Sumw2(); + hData->Draw("pe1 same"); + // hData->Draw("l same"); + + if (mean != 999999.) { + TPaveText* text1 = new TPaveText(0.21, 0.82, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("Mean: %0.2f #pm %0.2f", mean, eMean)); + text1->AddText(Form("RMS: %0.2f #pm %0.2f", rms, eRms)); + text1->Draw(); + } + if (rms != 999999. && mean == 999999.) 
{ + TPaveText* text1 = new TPaveText(0.21, 0.87, 0.51, 0.92, "NDC"); + text1->SetTextFont(43); + text1->SetTextSize(30.); + text1->SetBorderSize(1); + text1->SetFillColor(kWhite); + text1->AddText(Form("RMS: %0.2f", rms)); + text1->Draw(); + } +} + +BOOST_AUTO_TEST_CASE(NDRegression2DIdeal_test) +{ + auto pfitNDIdeal = make_unique<NDRegression>("pfitNDIdeal", "pfitNDIdeal"); + auto pfitNDIdeal2 = make_unique<NDRegression>("pfitNDIdeal2", "pfitNDIdeal2"); + + auto success = pfitNDIdeal->init(); + BOOST_CHECK(success == true); + success = pfitNDIdeal2->init(); + BOOST_CHECK(success == true); + + // + // 0.) Initialization of variables and THn + // + Int_t ndim = 2; + Double_t err = 0.1; + + o2::utils::TreeStreamRedirector pcstreamIn("fitNDLocalTestInput.root", "recreate"); + auto pcstreamOutIdeal = make_shared<o2::utils::TreeStreamRedirector>("fitNDLocalTestOutputIdeal.root", "recreate"); + auto pcstreamOutIdeal2 = make_shared<o2::utils::TreeStreamRedirector>("fitNDLocalTestOutputIdeal2.root", "recreate"); + + Double_t* xyz = new Double_t[ndim]; + Double_t* sxyz = new Double_t[ndim]; + Int_t* nbins = new Int_t[ndim]; + Double_t* xmin = new Double_t[ndim]; + Double_t* xmax = new Double_t[ndim]; + TString** chxyz = new TString*[ndim]; + for (Int_t idim = 0; idim < ndim; idim++) { + chxyz[idim] = new TString(TString::Format("xyz%d=", idim).Data()); + nbins[idim] = 70; + xmin[idim] = -1; + xmax[idim] = 2; + } + + auto pformula = make_shared<TFormula>("pformula", "cos(7*x[0])*cos(3*x[1])+pow(x[0], x[1])"); + auto hN = make_shared<THnF>("exampleFit", "exampleFit", ndim, nbins, xmin, xmax); + // + // 1.) 
generate random input points + // + for (Int_t ipoint = 0; ipoint < 1e4; ipoint++) { + for (Int_t idim = 0; idim < ndim; idim++) { + xyz[idim] = gRandom->Rndm(); + } + Double_t value = pformula->EvalPar(xyz, 0); + Double_t noise = gRandom->Gaus() * err; + Double_t noise2 = noise * (1 + (gRandom->Rndm() < 0.1) * 100); // noise with 10 percent of outliers + Double_t noiseBreit = gRandom->BreitWigner() * err; + pcstreamIn << "testInput" + << "val=" << value << "err=" << err << "noise=" << noise << // gausian noise + "noise2=" << noise2 << // gausian noise + 10% of outliers + "noiseBreit=" << noiseBreit; + for (Int_t idim = 0; idim < ndim; idim++) { + pcstreamIn << "testInput" << chxyz[idim]->Data() << xyz[idim]; + } + pcstreamIn << "testInput" + << "\n"; + } + pcstreamIn.Close(); + std::cout << "testfile done\n"; + + TFile inpf("fitNDLocalTestInput.root"); + BOOST_CHECK(!inpf.IsZombie()); + auto treeIn = (TTree*)(inpf.GetFile()->Get("testInput")); + BOOST_CHECK(treeIn); + + pfitNDIdeal->SetStreamer(pcstreamOutIdeal); + pfitNDIdeal2->SetStreamer(pcstreamOutIdeal2); + + success = pfitNDIdeal->SetHistogram((THn*)((hN->Clone()))); + BOOST_CHECK(success); + success = pfitNDIdeal2->SetHistogram((THn*)((hN->Clone()))); + BOOST_CHECK(success); + + success = pfitNDIdeal->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.02:0.02", "2:2", 0.0001); + BOOST_CHECK(success); + success = pfitNDIdeal2->MakeFit(treeIn, "val:err", "xyz0:xyz1", "Entry$%2==1", "0.08:0.08", "2:2", 0.0001); // sample Gaussian1 + BOOST_CHECK(success); + + std::cout << "Now drawing...\n"; + + pfitNDIdeal->AddVisualCorrection(pfitNDIdeal.get(), 1); + pfitNDIdeal2->AddVisualCorrection(pfitNDIdeal2.get(), 2); + + TObjArray* array = NDRegression::GetVisualCorrections(); + for (Int_t i = 0; i < array->GetEntries(); i++) { + NDRegression* regression = (NDRegression*)array->At(i); + if (regression == NULL) + continue; + regression->AddVisualCorrection(regression); + Int_t hashIndex = 
regression->GetVisualCorrectionIndex(); + treeIn->SetAlias(regression->GetName(), TString::Format("o2::nd_regression::NDRegression::GetCorrND(%d,xyz0,xyz1+0)", hashIndex).Data()); + } + pcstreamOutIdeal.reset(); + pcstreamOutIdeal2.reset(); + std::cout << "Entries: " << array->GetEntries() << std::endl; + + TCanvas* canvasIdeal = new TCanvas("canvasIdeal", "canvasIdeal", 800, 800); + treeIn->Draw("val>>inputData(71,-1.1,2.1)", "", ""); + TH1F* inputData = (TH1F*)gPad->GetPrimitive("inputData"); + treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(1,xyz0,xyz1))>>ideal(71,-1.1,2.1)", "", ""); + // treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(3,xyz0,xyz1)-o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))/sqrt(o2::nd_regression::NDRegression::GetCorrNDError(3,xyz0,xyz1)**2+o2::nd_regression::NDRegression::GetCorrNDError(2,xyz0,xyz1)**2)>>ideal(401,-20.5,20.5)","",""); + TH1F* ideal = (TH1F*)gPad->GetPrimitive("ideal"); + Double_t meanIdeal = treeIn->GetHistogram()->GetMean(); + Double_t meanIdealErr = treeIn->GetHistogram()->GetMeanError(); + Double_t rmsIdeal = treeIn->GetHistogram()->GetRMS(); + Double_t rmsIdealErr = treeIn->GetHistogram()->GetRMSError(); + if (TMath::Abs(meanIdeal) < 10 * meanIdealErr) { + ::Info("NDRegression2DIdealTest", "mean pull OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr); + } else { + ::Error("NDRegressionTest", "mean pull NOT OK %3.3f\t+-%3.3f", meanIdeal, meanIdealErr); + } + if (rmsIdeal < 1 + rmsIdealErr) { + ::Info("NDRegressionTest", " rms pull OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr); + } else { + ::Error("NDRegressionTest", " rms pull NOT OK %3.3f\t+-%3.3f", rmsIdeal, rmsIdealErr); + } + treeIn->Draw("(o2::nd_regression::NDRegression::GetCorrND(2,xyz0,xyz1))>>ideal2(71,-1.1,2.1)", "", ""); + TH1F* ideal2 = (TH1F*)gPad->GetPrimitive("ideal2"); + + inputData->Draw("same"); + PlotData(ideal, "Ideal", "counts (arb. 
units)", kRed + 2, "zTitle", rmsIdeal, rmsIdealErr, meanIdeal, meanIdealErr); + PlotData(ideal2, "Ideal", "counts (arb. units)", kGreen + 2, "zTitle"); + canvasIdeal->SaveAs("NDRegressionTest.canvasIdealTest.png"); + + inpf.Close(); +}