From a3a73717bb77969d091d79377f8af56547cbe42e Mon Sep 17 00:00:00 2001 From: mmusich Date: Mon, 6 Jan 2025 13:04:34 +0100 Subject: [PATCH 1/3] improve GeneralPurposeVertexAnalyzer --- .../plugins/GeneralPurposeVertexAnalyzer.cc | 113 +++++++++++++++++- 1 file changed, 109 insertions(+), 4 deletions(-) diff --git a/Alignment/OfflineValidation/plugins/GeneralPurposeVertexAnalyzer.cc b/Alignment/OfflineValidation/plugins/GeneralPurposeVertexAnalyzer.cc index 53acc760566d2..a87f00871727d 100644 --- a/Alignment/OfflineValidation/plugins/GeneralPurposeVertexAnalyzer.cc +++ b/Alignment/OfflineValidation/plugins/GeneralPurposeVertexAnalyzer.cc @@ -52,6 +52,50 @@ using reco::TrackCollection; +namespace gpVertexAnalyzer { + void setBinLog(TAxis *axis) { + int bins = axis->GetNbins(); + float from = axis->GetXmin(); + float to = axis->GetXmax(); + float width = (to - from) / bins; + std::vector new_bins(bins + 1, 0); + for (int i = 0; i <= bins; i++) { + new_bins[i] = TMath::Power(10, from + i * width); + } + axis->Set(bins, new_bins.data()); + } + + void setBinLogX(TH1 *h) { + TAxis *axis = h->GetXaxis(); + setBinLog(axis); + } + + void setBinLogY(TH1 *h) { + TAxis *axis = h->GetYaxis(); + setBinLog(axis); + } + + template + TProfile *makeProfileIfLog(const edm::Service &fs, bool logx, bool logy, Args &&...args) { + auto prof = fs->make(std::forward(args)...); + if (logx) + setBinLogX(prof); + if (logy) + setBinLogY(prof); + return prof; + } + + template + TH1D *makeTH1IfLog(const edm::Service &fs, bool logx, bool logy, Args &&...args) { + auto h1 = fs->make(std::forward(args)...); + if (logx) + setBinLogX(h1); + if (logy) + setBinLogY(h1); + return h1; + } +} // namespace gpVertexAnalyzer + class GeneralPurposeVertexAnalyzer : public edm::one::EDAnalyzer { public: explicit GeneralPurposeVertexAnalyzer(const edm::ParameterSet &); @@ -73,9 +117,9 @@ class GeneralPurposeVertexAnalyzer : public edm::one::EDAnalyzer fs); @@ -128,7 +172,7 @@ class GeneralPurposeVertexAnalyzer : public edm::one::EDAnalyzer("EtaMin"); double EtaMax = config.getParameter("EtaMax"); + int PtBin = config.getParameter("PtBin"); + double PtMin = config.getParameter("PtMin") * pTcut_; + double PtMax = config.getParameter("PtMax") * pTcut_; + IP_ = fs->make(fmt::format("d{}_pt{}", varname_, pTcut_).c_str(), fmt::format("PV tracks (p_{{T}} > {} GeV) d_{{{}}} (#mum)", pTcut_, varname_).c_str(), VarBin, @@ -169,6 +217,13 @@ void GeneralPurposeVertexAnalyzer::IPMonitoring::bookIPMonitor(const edm::Parame 0., (varname_.find("xy") != std::string::npos) ? 2000. 
: 10000.); + IPPull_ = fs->make( + fmt::format("d{}Pull_pt{}", varname_, pTcut_).c_str(), + fmt::format("PV tracks (p_{{T}} > {} GeV) d_{{{}}}/#sigma_{{d_{{{}}}}}", pTcut_, varname_, varname_).c_str(), + 100, + -5., + 5.); + IPVsPhi_ = fs->make(fmt::format("d{}VsPhi_pt{}", varname_, pTcut_).c_str(), fmt::format("PV tracks (p_{{T}} > {}) d_{{{}}} VS track #phi", pTcut_, varname_).c_str(), @@ -193,6 +248,21 @@ void GeneralPurposeVertexAnalyzer::IPMonitoring::bookIPMonitor(const edm::Parame IPVsEta_->SetXTitle("PV track (p_{T} > 1 GeV) #eta"); IPVsEta_->SetYTitle(fmt::format("PV tracks (p_{{T}} > {} GeV) d_{{{}}} (#mum)", pTcut_, varname_).c_str()); + IPVsPt_ = gpVertexAnalyzer::makeProfileIfLog( + fs, + true, /* x-axis */ + false, /* y-axis */ + fmt::format("d{}VsPt_pt{}", varname_, pTcut_).c_str(), + fmt::format("PV tracks (p_{{T}} > {}) d_{{{}}} VS track p_{{T}}", pTcut_, varname_).c_str(), + PtBin, + log10(PtMin), + log10(PtMax), + VarMin, + VarMax, + ""); + IPVsPt_->SetXTitle("PV track (p_{T} > 1 GeV) p_{T} [GeV]"); + IPVsPt_->SetYTitle(fmt::format("PV tracks (p_{{T}} > {} GeV) d_{{{}}} (#mum)", pTcut_, varname_).c_str()); + IPErrVsPhi_ = fs->make(fmt::format("d{}ErrVsPhi_pt{}", varname_, pTcut_).c_str(), fmt::format("PV tracks (p_{{T}} > {}) d_{{{}}} error VS track #phi", pTcut_, varname_).c_str(), @@ -217,6 +287,21 @@ void GeneralPurposeVertexAnalyzer::IPMonitoring::bookIPMonitor(const edm::Parame IPErrVsEta_->SetXTitle("PV track (p_{T} > 1 GeV) #eta"); IPErrVsEta_->SetYTitle(fmt::format("PV tracks (p_{{T}} > {} GeV) d_{{{}}} error (#mum)", pTcut_, varname_).c_str()); + IPErrVsPt_ = gpVertexAnalyzer::makeProfileIfLog( + fs, + true, /* x-axis */ + false, /* y-axis */ + fmt::format("d{}ErrVsPt_pt{}", varname_, pTcut_).c_str(), + fmt::format("PV tracks (p_{{T}} > {}) d_{{{}}} error VS track p_{{T}}", pTcut_, varname_).c_str(), + PtBin, + log10(PtMin), + log10(PtMax), + VarMin, + VarMax, + ""); + IPErrVsPt_->SetXTitle("PV track (p_{T} > 1 GeV) p_{T} [GeV]"); + IPErrVsPt_->SetYTitle(fmt::format("PV tracks (p_{{T}} > {} GeV) d_{{{}}} error (#mum)", pTcut_, varname_).c_str()); + IPVsEtaVsPhi_ = fs->make( fmt::format("d{}VsEtaVsPhi_pt{}", varname_, pTcut_).c_str(), fmt::format("PV tracks (p_{{T}} > {}) d_{{{}}} VS track #eta VS track #phi", pTcut_, varname_).c_str(), @@ -294,6 +379,7 @@ GeneralPurposeVertexAnalyzer::GeneralPurposeVertexAnalyzer(const edm::ParameterS bsBeamWidthX(nullptr), bsBeamWidthY(nullptr), bsType(nullptr), + trackpt(nullptr), sumpt(nullptr), ntracks(nullptr), weight(nullptr), @@ -411,6 +497,8 @@ void GeneralPurposeVertexAnalyzer::pvTracksPlots(const reco::Vertex &v) { } const float pt = t->pt(); + trackpt->Fill(pt); + if (pt < 1.f) { continue; } @@ -435,22 +523,28 @@ void GeneralPurposeVertexAnalyzer::pvTracksPlots(const reco::Vertex &v) { phi_pt1->Fill(phi); eta_pt1->Fill(eta); + // dxy pT>1 + dxy_pt1.IP_->Fill(Dxy); dxy_pt1.IPVsPhi_->Fill(phi, Dxy); dxy_pt1.IPVsEta_->Fill(eta, Dxy); dxy_pt1.IPVsEtaVsPhi_->Fill(eta, phi, Dxy); dxy_pt1.IPErr_->Fill(DxyErr); + dxy_pt1.IPPull_->Fill(Dxy / DxyErr); dxy_pt1.IPErrVsPhi_->Fill(phi, DxyErr); dxy_pt1.IPErrVsEta_->Fill(eta, DxyErr); dxy_pt1.IPErrVsEtaVsPhi_->Fill(eta, phi, DxyErr); + // dz pT>1 + dz_pt1.IP_->Fill(Dz); dz_pt1.IPVsPhi_->Fill(phi, Dz); dz_pt1.IPVsEta_->Fill(eta, Dz); dz_pt1.IPVsEtaVsPhi_->Fill(eta, phi, Dz); dz_pt1.IPErr_->Fill(DzErr); + dz_pt1.IPPull_->Fill(Dz / DzErr); dz_pt1.IPErrVsPhi_->Fill(phi, DzErr); dz_pt1.IPErrVsEta_->Fill(eta, DzErr); dz_pt1.IPErrVsEtaVsPhi_->Fill(eta, phi, DzErr); @@ -459,22 +553,28 @@ 
void GeneralPurposeVertexAnalyzer::pvTracksPlots(const reco::Vertex &v) { phi_pt10->Fill(phi); eta_pt10->Fill(eta); + // dxy pT>10 + dxy_pt10.IP_->Fill(Dxy); dxy_pt10.IPVsPhi_->Fill(phi, Dxy); dxy_pt10.IPVsEta_->Fill(eta, Dxy); dxy_pt10.IPVsEtaVsPhi_->Fill(eta, phi, Dxy); dxy_pt10.IPErr_->Fill(DxyErr); + dxy_pt10.IPPull_->Fill(Dxy / DxyErr); dxy_pt10.IPErrVsPhi_->Fill(phi, DxyErr); dxy_pt10.IPErrVsEta_->Fill(eta, DxyErr); dxy_pt10.IPErrVsEtaVsPhi_->Fill(eta, phi, DxyErr); + // dz pT>10 + dz_pt10.IP_->Fill(Dz); dz_pt10.IPVsPhi_->Fill(phi, Dz); dz_pt10.IPVsEta_->Fill(eta, Dz); dz_pt10.IPVsEtaVsPhi_->Fill(eta, phi, Dz); dz_pt10.IPErr_->Fill(DzErr); + dz_pt10.IPPull_->Fill(Dz / DzErr); dz_pt10.IPErrVsPhi_->Fill(phi, DzErr); dz_pt10.IPErrVsEta_->Fill(eta, DzErr); dz_pt10.IPErrVsEtaVsPhi_->Fill(eta, phi, DzErr); @@ -649,6 +749,8 @@ void GeneralPurposeVertexAnalyzer::beginJob() { dxy2 = book("dxyzoom", fmt::sprintf("%s d_{xy} (#mum)", s_1).c_str(), dxyBin_, dxyMin_ / 5., dxyMax_ / 5.); + trackpt = gpVertexAnalyzer::makeTH1IfLog( + fs_, true, false, "pt_track", "PV tracks p_{T};PV tracks p_{T} [GeV];#tracks", 49, log10(1.), log10(50.)); phi_pt1 = book("phi_pt1", fmt::sprintf("%s #phi; PV tracks #phi;#tracks", s_1).c_str(), phiBin_, phiMin_, phiMax_); eta_pt1 = @@ -701,6 +803,9 @@ void GeneralPurposeVertexAnalyzer::fillDescriptions(edm::ConfigurationDescriptio desc.add("EtaBin2D", 8); desc.add("EtaMin", -2.7); desc.add("EtaMax", 2.7); + desc.add("PtBin", 49); + desc.add("PtMin", 1.); + desc.add("PtMax", 50.); descriptions.addWithDefaultLabel(desc); } From 90f00248e34ef9685b484c074738acf7bdc85628 Mon Sep 17 00:00:00 2001 From: mmusich Date: Tue, 7 Jan 2025 11:22:45 +0100 Subject: [PATCH 2/3] add Generic Validation as dataset validation tool --- Alignment/OfflineValidation/bin/BuildFile.xml | 1 + .../OfflineValidation/bin/GenericVmerge.cc | 73 ++++++ .../python/TkAlAllInOneTool/GenericV.py | 96 ++++++++ .../python/TkAlAllInOneTool/GenericV_cfg.py | 223 ++++++++++++++++++ .../scripts/validateAlignments.py | 8 + .../OfflineValidation/test/BuildFile.xml | 3 + .../test/testingScripts/test_unitGeneric.sh | 16 ++ .../OfflineValidation/test/unit_test.json | 17 ++ .../OfflineValidation/test/unit_test.yaml | 18 ++ 9 files changed, 455 insertions(+) create mode 100644 Alignment/OfflineValidation/bin/GenericVmerge.cc create mode 100644 Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV.py create mode 100644 Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV_cfg.py create mode 100755 Alignment/OfflineValidation/test/testingScripts/test_unitGeneric.sh diff --git a/Alignment/OfflineValidation/bin/BuildFile.xml b/Alignment/OfflineValidation/bin/BuildFile.xml index 023f203f5c619..b565094c92cb2 100644 --- a/Alignment/OfflineValidation/bin/BuildFile.xml +++ b/Alignment/OfflineValidation/bin/BuildFile.xml @@ -18,6 +18,7 @@ + diff --git a/Alignment/OfflineValidation/bin/GenericVmerge.cc b/Alignment/OfflineValidation/bin/GenericVmerge.cc new file mode 100644 index 0000000000000..5d1b7f3ad1dfc --- /dev/null +++ b/Alignment/OfflineValidation/bin/GenericVmerge.cc @@ -0,0 +1,73 @@ +#include +#include +#include +#include +#include + +#include "exceptions.h" +#include "toolbox.h" +#include "Options.h" + +#include "boost/filesystem.hpp" +#include "boost/property_tree/ptree.hpp" +#include "boost/property_tree/json_parser.hpp" +#include "boost/optional.hpp" + +#include "TString.h" +#include "TASImage.h" +#include "TGraph.h" + +#include "Alignment/OfflineValidation/macros/loopAndPlot.C" +#include 
"Alignment/OfflineValidation/interface/TkAlStyle.h" + +using namespace std; +using namespace AllInOneConfig; + +namespace pt = boost::property_tree; + +int merge(int argc, char* argv[]) { + // parse the command line + + Options options; + options.helper(argc, argv); + options.parser(argc, argv); + + //Read in AllInOne json config + pt::ptree main_tree; + pt::read_json(options.config, main_tree); + + pt::ptree alignments = main_tree.get_child("alignments"); + pt::ptree validation = main_tree.get_child("validation"); + + TString filesAndLabels; + for (const auto& childTree : alignments) { + // Print node name and its attributes + // std::cout << "Node: " << childTree.first << std::endl; + // for (const auto& attr : childTree.second) { + // std::cout << " Attribute: " << attr.first << " = " << attr.second.data() << std::endl; + // } + + std::string file = childTree.second.get("file"); + std::cout << file << std::endl; + std::cout << childTree.second.get("title") << std::endl; + + // Check if the file contains "/eos/cms/" and add the prefix accordingly + std::string prefixToAdd = file.find("/eos/cms/") != std::string::npos ? "root://eoscms.cern.ch/" : ""; + std::string toAdd = prefixToAdd + file + "/GenericValidation.root=" + childTree.second.get("title") + ","; + filesAndLabels += toAdd; + } + + if (filesAndLabels.Length() > 0) { + filesAndLabels.Remove(filesAndLabels.Length() - 1); // Remove the last character + } + + std::cout << "filesAndLabels: " << filesAndLabels << std::endl; + + loopAndPlot(filesAndLabels); + + return EXIT_SUCCESS; +} + +#ifndef DOXYGEN_SHOULD_SKIP_THIS +int main(int argc, char* argv[]) { return exceptions(argc, argv); } +#endif diff --git a/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV.py b/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV.py new file mode 100644 index 0000000000000..cfb66d903537e --- /dev/null +++ b/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV.py @@ -0,0 +1,96 @@ +import copy +import os + +def GenericV(config, validationDir): + ##List with all jobs + jobs = [] + GenericVType = "single" + + ##List with all wished IOVs + IOVs = [] + + ##Start with single GenericV jobs + if not GenericVType in config["validations"]["Generic"]: + raise Exception("No 'single' key word in config for GenericV") + + for singleName in config["validations"]["Generic"][GenericVType]: + for IOV in config["validations"]["Generic"][GenericVType][singleName]["IOV"]: + ##Save IOV to loop later for merge jobs + if not IOV in IOVs: + IOVs.append(IOV) + + for alignment in config["validations"]["Generic"][GenericVType][singleName]["alignments"]: + ##Work directory for each IOV + workDir = "{}/GenericV/{}/{}/{}/{}".format(validationDir, GenericVType, singleName, alignment, IOV) + + ##Write local config + local = {} + local["output"] = "{}/{}/GenericV/{}/{}/{}/{}".format(config["LFS"], config["name"], GenericVType, alignment, singleName, IOV) + local["alignment"] = copy.deepcopy(config["alignments"][alignment]) + local["validation"] = copy.deepcopy(config["validations"]["Generic"][GenericVType][singleName]) + local["validation"].pop("alignments") + local["validation"]["IOV"] = IOV + if "dataset" in local["validation"]: + local["validation"]["dataset"] = local["validation"]["dataset"].format(IOV) + if "goodlumi" in local["validation"]: + local["validation"]["goodlumi"] = local["validation"]["goodlumi"].format(IOV) + + ##Write job info + job = { + "name": "GenericV_{}_{}_{}_{}".format(GenericVType, alignment, singleName, IOV), + "dir": workDir, + 
"exe": "cmsRun", + "cms-config": "{}/src/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV_cfg.py".format(os.environ["CMSSW_BASE"]), + "run-mode": "Condor", + "dependencies": [], + "config": local, + } + + jobs.append(job) + + ##Do merge GenericV if wished + if "merge" in config["validations"]["Generic"]: + ##List with merge jobs, will be expanded to jobs after looping + mergeJobs = [] + GenericVType = "merge" + + ##Loop over all merge jobs/IOVs which are wished + for mergeName in config["validations"]["Generic"][GenericVType]: + for IOV in IOVs: + ##Work directory for each IOV + workDir = "{}/GenericV/{}/{}/{}".format(validationDir, GenericVType, mergeName, IOV) + + ##Write job info + local = {} + + job = { + "name": "GenericV_{}_{}_{}".format(GenericVType, mergeName, IOV), + "dir": workDir, + "exe": "GenericVmerge", + "run-mode": "Condor", + "dependencies": [], + "config": local, + } + + for alignment in config["alignments"]: + ##Deep copy necessary things from global config + local.setdefault("alignments", {}) + if alignment in config["validations"]["Generic"]["single"][mergeName]["alignments"]: + local["alignments"][alignment] = copy.deepcopy(config["alignments"][alignment]) + local["validation"] = copy.deepcopy(config["validations"]["Generic"][GenericVType][mergeName]) + local["output"] = "{}/{}/GenericV/{}/{}/{}".format(config["LFS"], config["name"], GenericVType, mergeName, IOV) + + ##Loop over all single jobs + for singleJob in jobs: + ##Get single job info and append to merge job if requirements fullfilled + alignment, singleName, singleIOV = singleJob["name"].split("_")[2:] + + if int(singleIOV) == IOV and singleName in config["validations"]["Generic"][GenericVType][mergeName]["singles"]: + local["alignments"][alignment]["file"] = singleJob["config"]["output"] + job["dependencies"].append(singleJob["name"]) + + mergeJobs.append(job) + + jobs.extend(mergeJobs) + + return jobs diff --git a/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV_cfg.py b/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV_cfg.py new file mode 100644 index 0000000000000..80107d9510e63 --- /dev/null +++ b/Alignment/OfflineValidation/python/TkAlAllInOneTool/GenericV_cfg.py @@ -0,0 +1,223 @@ +import json +import os +import FWCore.ParameterSet.Config as cms +import FWCore.PythonUtilities.LumiList as LumiList +from Alignment.OfflineValidation.TkAlAllInOneTool.defaultInputFiles_cff import filesDefaultMC_NoPU +from FWCore.ParameterSet.VarParsing import VarParsing + +################################################################### +# Define process +################################################################### +process = cms.Process("GenericTrackAndVertexValidation") + +################################################################### +# Argument parsing +################################################################### +options = VarParsing() +options.register("config", "", VarParsing.multiplicity.singleton, VarParsing.varType.string , "AllInOne config") +options.parseArguments() + +################################################################### +# Read in AllInOne config in JSON format +################################################################### +if options.config == "": + config = {"validation": {}, + "alignment": {}} +else: + with open(options.config, "r") as configFile: + config = json.load(configFile) + +################################################################### +# Read filenames from given TXT file and define input source 
+################################################################### +readFiles = [] + +if "dataset" in config["validation"]: + with open(config["validation"]["dataset"], "r") as datafiles: + for fileName in datafiles.readlines(): + readFiles.append(fileName.replace("\n", "")) + + process.source = cms.Source("PoolSource", + fileNames = cms.untracked.vstring(readFiles), + skipEvents = cms.untracked.uint32(0) + ) +else: + print(">>>>>>>>>> GenericV_cfg.py: msg%-i: config not specified! Loading default MC simulation -> filesDefaultMC_NoPU!") + process.source = cms.Source("PoolSource", + fileNames = filesDefaultMC_NoPU, + skipEvents = cms.untracked.uint32(0) + ) + +################################################################### +# Get good lumi section and load data or handle MC +################################################################### +if "goodlumi" in config["validation"]: + if os.path.isfile(config["validation"]["goodlumi"]): + goodLumiSecs = cms.untracked.VLuminosityBlockRange(LumiList.LumiList(filename = config["validation"]["goodlumi"]).getCMSSWString().split(',')) + else: + print("Does not exist: {}. Continue without good lumi section file.") + goodLumiSecs = cms.untracked.VLuminosityBlockRange() + +else: + goodLumiSecs = cms.untracked.VLuminosityBlockRange() + +################################################################### +# Runs and events +################################################################### +runboundary = config["validation"].get("runboundary", 1) +isMultipleRuns=False +if(isinstance(runboundary, (list, tuple))): + isMultipleRuns=True + print("Multiple Runs are selected") +if(isMultipleRuns): + process.source.firstRun = cms.untracked.uint32(runboundary[0]) +else: + process.source.firstRun = cms.untracked.uint32(runboundary) + +################################################################### +# Default set to 1 for unit tests +################################################################### +process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(config["validation"].get("maxevents", 1))) + +################################################################### +# Bookeeping +################################################################### +process.options = cms.untracked.PSet( + wantSummary = cms.untracked.bool(False), + Rethrow = cms.untracked.vstring("ProductNotFound"), # make this exception fatal + fileMode = cms.untracked.string('NOMERGE'), # no ordering needed, but calls endRun/beginRun etc. 
at file boundaries +) + +################################################################### +# Messages +################################################################### +process.load("FWCore.MessageLogger.MessageLogger_cfi") +process.MessageLogger.cerr.FwkReport.reportEvery = 1000 +process.MessageLogger.cout.enableStatistics = cms.untracked.bool(True) + +################################################################### +# Basic modules +################################################################### +process.load("RecoVertex.BeamSpotProducer.BeamSpot_cff") +process.load("Configuration.Geometry.GeometryDB_cff") +process.load('Configuration.StandardSequences.Services_cff') +process.load("Configuration.StandardSequences.MagneticField_cff") + +#################################################################### +# Load and Configure Track refitter +#################################################################### +process.load("RecoTracker.TrackProducer.TrackRefitters_cff") +process.TrackRefitter.src = config["validation"].get("trackcollection", "generalTracks") +process.TrackRefitter.TTRHBuilder = config["validation"].get("tthrbuilder", "WithAngleAndTemplate") +process.TrackRefitter.NavigationSchool = "" + +#################################################################### +# Global tag +#################################################################### +process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff") +from Configuration.AlCa.GlobalTag import GlobalTag +process.GlobalTag = GlobalTag(process.GlobalTag, config["alignment"].get("globaltag", "auto:phase1_2017_realistic")) + +#################################################################### +# Load conditions if wished +#################################################################### +if "conditions" in config["alignment"]: + from CalibTracker.Configuration.Common.PoolDBESSource_cfi import poolDBESSource + + for condition in config["alignment"]["conditions"]: + setattr(process, "conditionsIn{}".format(condition), poolDBESSource.clone( + connect = cms.string(str(config["alignment"]["conditions"][condition]["connect"])), + toGet = cms.VPSet( + cms.PSet( + record = cms.string(str(condition)), + tag = cms.string(str(config["alignment"]["conditions"][condition]["tag"])) + ) + ) + ) + ) + + setattr(process, "prefer_conditionsIn{}".format(condition), cms.ESPrefer("PoolDBESSource", "conditionsIn{}".format(condition))) + +################################################################### +# TFileService +################################################################### +process.TFileService = cms.Service("TFileService", + fileName = cms.string("{}/GenericValidation.root".format(config.get("output", os.getcwd()))), + closeFileFast = cms.untracked.bool(True)) + +print("Saving the output at %s" % process.TFileService.fileName.value()) + +################################################################### +# Primary Vertex refit +################################################################### +process.load("TrackingTools.TransientTrack.TransientTrackBuilder_cfi") +from RecoVertex.PrimaryVertexProducer.OfflinePrimaryVertices_cfi import offlinePrimaryVertices as _offlinePVs +process.offlinePrimaryVerticesFromRefittedTrks = _offlinePVs.clone( + TrackLabel = "TrackRefitter", + TkFilterParameters = dict( + maxNormalizedChi2 = 20, + minSiliconLayersWithHits = 5, + maxD0Significance = 5.0, + maxD0Error = 1.0, + maxDzError = 1.0, + minPixelLayersWithHits = 2 + ) +) + 
+################################################################### +# The analysis modules +################################################################### +process.trackanalysis = cms.EDAnalyzer("GeneralPurposeTrackAnalyzer", + TkTag = cms.InputTag("TrackRefitter"), + isCosmics = cms.bool(config["validation"].get("isCosmics", False)) + ) + +process.vertexanalysis = cms.EDAnalyzer('GeneralPurposeVertexAnalyzer', + ndof = cms.int32(4), + vertexLabel = cms.InputTag('offlinePrimaryVerticesFromRefittedTrks'), + beamSpotLabel = cms.InputTag('offlineBeamSpot'), + Xpos = cms.double(0.1), + Ypos = cms.double(0), + TkSizeBin = cms.int32(100), + TkSizeMin = cms.double(499.5), + TkSizeMax = cms.double(-0.5), + DxyBin = cms.int32(100), + DxyMin = cms.double(5000), + DxyMax = cms.double(-5000), + DzBin = cms.int32(100), + DzMin = cms.double(-2000), + DzMax = cms.double(2000), + PhiBin = cms.int32(32), + PhiBin2D = cms.int32(12), + PhiMin = cms.double(-3.1415926535897931), + PhiMax = cms.double(3.1415926535897931), + EtaBin = cms.int32(26), + EtaBin2D = cms.int32(8), + EtaMin = cms.double(-2.7), + EtaMax = cms.double(2.7)) + +process.theValidSequence = cms.Sequence(process.offlineBeamSpot + + process.TrackRefitter + + process.offlinePrimaryVerticesFromRefittedTrks + + process.trackanalysis + + process.vertexanalysis) + +################################################################### +# The trigger filter module +################################################################### +from HLTrigger.HLTfilters.triggerResultsFilter_cfi import * +process.theHLTFilter = triggerResultsFilter.clone( + triggerConditions = cms.vstring(config["validation"].get("triggerBits", "*")), + hltResults = cms.InputTag( "TriggerResults", "", "HLT" ), + l1tResults = cms.InputTag( "" ), + throw = cms.bool(False) +) + +HLTSel = config["validation"].get("HLTselection", False) + +if (HLTSel): + process.p = cms.Path(process.theHLTFilter + process.theValidSequence) +else: + process.p = cms.Path(process.theValidSequence) + +print("Done") diff --git a/Alignment/OfflineValidation/scripts/validateAlignments.py b/Alignment/OfflineValidation/scripts/validateAlignments.py index 55469cfcda309..b6848ada4d3e6 100755 --- a/Alignment/OfflineValidation/scripts/validateAlignments.py +++ b/Alignment/OfflineValidation/scripts/validateAlignments.py @@ -20,6 +20,7 @@ import Alignment.OfflineValidation.TkAlAllInOneTool.DiMuonV as DiMuonV import Alignment.OfflineValidation.TkAlAllInOneTool.MTS as MTS import Alignment.OfflineValidation.TkAlAllInOneTool.PixBary as PixBary +import Alignment.OfflineValidation.TkAlAllInOneTool.GenericV as GenericV ############################################## def parser(): @@ -264,12 +265,19 @@ def main(): elif validation == "JetHT": jobs.extend(JetHT.JetHT(config, validationDir)) + elif validation == "DiMuonV": jobs.extend(DiMuonV.DiMuonV(config, validationDir)) + elif validation == "MTS": jobs.extend(MTS.MTS(config, validationDir)) + elif validation == "PixBary": jobs.extend(PixBary.PixBary(config, validationDir, args.verbose)) + + elif validation == "Generic": + jobs.extend(GenericV.GenericV(config, validationDir)) + else: raise Exception("Unknown validation method: {}".format(validation)) diff --git a/Alignment/OfflineValidation/test/BuildFile.xml b/Alignment/OfflineValidation/test/BuildFile.xml index f37c002324c15..a37e4904ab810 100644 --- a/Alignment/OfflineValidation/test/BuildFile.xml +++ b/Alignment/OfflineValidation/test/BuildFile.xml @@ -27,6 +27,9 @@ + + + diff --git 
a/Alignment/OfflineValidation/test/testingScripts/test_unitGeneric.sh b/Alignment/OfflineValidation/test/testingScripts/test_unitGeneric.sh
new file mode 100755
index 0000000000000..8877d23f3569d
--- /dev/null
+++ b/Alignment/OfflineValidation/test/testingScripts/test_unitGeneric.sh
@@ -0,0 +1,16 @@
+#! /bin/bash
+
+function die { echo $1: status $2 ; exit $2; }
+
+echo "TESTING Alignment/Generic single configuration with json..."
+pushd test_yaml/GenericV/single/testUnits/unitTest/1/
+./cmsRun validation_cfg.py config=validation.json || die "Failure running Generic single configuration with json" $?
+
+echo "TESTING Alignment/Generic single configuration standalone..."
+./cmsRun validation_cfg.py || die "Failure running Generic single configuration standalone" $?
+popd
+
+echo "TESTING GenericV merge step"
+pushd test_yaml/GenericV/merge/testUnits/1/
+./GenericVmerge validation.json --verbose || die "Failure running GenericV merge step" $?
+popd
diff --git a/Alignment/OfflineValidation/test/unit_test.json b/Alignment/OfflineValidation/test/unit_test.json
index cde695aa859b3..f98590e6b4c0d 100644
--- a/Alignment/OfflineValidation/test/unit_test.json
+++ b/Alignment/OfflineValidation/test/unit_test.json
@@ -192,6 +192,23 @@
            }
        }
    },
+    "Generic": {
+        "merge": {
+            "testUnits": {
+                "singles": ["testUnits"]
+            }
+        },
+        "single": {
+            "testUnits": {
+                "IOV": ["1"],
+                "alignments": ["unitTest"],
+                "trackcollection": "generalTracks",
+                "HLTSelection": "False",
+                "triggerBits" : "HLT_*",
+                "maxevents": "10"
+            }
+        }
+    },
    "DiMuonV" : {
        "merge": {
            "testUnits": {
diff --git a/Alignment/OfflineValidation/test/unit_test.yaml b/Alignment/OfflineValidation/test/unit_test.yaml
index 92519e519172c..dc1a73476d1e7 100644
--- a/Alignment/OfflineValidation/test/unit_test.yaml
+++ b/Alignment/OfflineValidation/test/unit_test.yaml
@@ -187,6 +187,24 @@ validations:
                HLTSelection: False
                triggerBits: HLT_*
                maxevents: 10
+
+    Generic:
+        merge:
+            testUnits:
+                singles:
+                    - testUnits
+
+        single:
+            testUnits:
+                IOV:
+                    - 1
+                alignments:
+                    - unitTest
+                trackcollection: generalTracks
+                HLTSelection: False
+                triggerBits: HLT_*
+                maxevents: 10
+
    DiMuonV:
        merge:
            testUnits:

From b6031b080a70e0a7d9f0fac50766eed2ac721960 Mon Sep 17 00:00:00 2001
From: mmusich
Date: Wed, 8 Jan 2025 11:16:18 +0100
Subject: [PATCH 3/3] add documentation for Generic Validation

---
 Alignment/OfflineValidation/README.md         |  6 +++
 Alignment/OfflineValidation/README_Generic.md | 39 +++++++++++++++++++
 2 files changed, 45 insertions(+)
 create mode 100644 Alignment/OfflineValidation/README_Generic.md

diff --git a/Alignment/OfflineValidation/README.md b/Alignment/OfflineValidation/README.md
index d7c4554d9812d..499ca94d70660 100644
--- a/Alignment/OfflineValidation/README.md
+++ b/Alignment/OfflineValidation/README.md
@@ -117,5 +117,11 @@ For details read [`README_JetHT.md`](https://github.com/cms-sw/cmssw/blob/master
 ## MTS validation
 For details read [`README_MTS.md`](https://github.com/cms-sw/cmssw/blob/master/Alignment/OfflineValidation/README_MTS.md)
 
+## Pixel BaryCenter
+For details read [`README_PixBary.md`](https://github.com/cms-sw/cmssw/blob/master/Alignment/OfflineValidation/README_PixBary.md)
+
+## Generic validation (dataset validation)
+For details read [`README_Generic.md`](https://github.com/cms-sw/cmssw/blob/master/Alignment/OfflineValidation/README_Generic.md)
+
 ## General info about IOV/run arguments
 For details read [`README_IOV.md`](https://github.com/cms-sw/cmssw/blob/master/Alignment/OfflineValidation/README_IOV.md)
diff --git
a/Alignment/OfflineValidation/README_Generic.md b/Alignment/OfflineValidation/README_Generic.md
new file mode 100644
index 0000000000000..aa84a9693abd6
--- /dev/null
+++ b/Alignment/OfflineValidation/README_Generic.md
@@ -0,0 +1,39 @@
+# Generic Validation
+
+## General info
+```
+validations:
+    Generic:
+        <step_type>:
+            <step_name>:
+                <options>
+```
+
+Generic validation runs in 2 possible types of steps:
+ - single (validation analysis by `GenericV_cfg.py`)
+ - (optional) merge (`GenericVmerge` macro)
+The step name is an arbitrary string which will be used as a reference for subsequent steps.
+A merge job will only start once all corresponding single jobs are done.
+Merge jobs can run in parallel.
+
+## Single Generic jobs
+Single jobs can be specified per run (or per IoV). In case of MC, the IoV is arbitrarily set to 1.
+
+Variable | Default value | Explanation/Options
+-------- | ------------- | --------------------
+IOV | None | List of IOVs/runs defined by integer value. IOV 1 is reserved for MC.
+Alignments | None | List of alignments. A separate directory is created for each of them.
+dataset | See defaultInputFiles_cff.py | Path to a txt file containing the list of input files to be used. If the file is missing on EOS or is corrupted, the job will eventually fail (most common issue).
+goodlumi | cms.untracked.VLuminosityBlockRange() | Path to a json file with the luminosity information for the selected IoV; it must contain the list of runs belonging to that IoV with lumiblock info. Format: `IOV_Vali_{}.json`
+maxevents | 1 | Maximum number of events before cmsRun terminates.
+trackcollection | "generalTracks" | Track collection to be used, e.g. "ALCARECOTkAlMuonIsolated" or "ALCARECOTkAlMinBias" ...
+tthrbuilder | "WithAngleAndTemplate" | TTRH builder to be used.
+
+## Merge Generic job
+Its name does not need to match the single-job name, but the option `singles` must list all single jobs to be merged.
+The style of the merged Generic plots can be adjusted via the global plotting style.
+
+Variable | Default value | Explanation/Options
+-------- | ------------- | --------------------
+singles | None | List of strings matching the single-job names to be merged in one plot.
+customrighttitle | "" | Top right title. The reserved word "IOV" will be replaced by the given IOV/run in the list.
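+
+## Example configuration (sketch)
+The snippet below is an illustrative sketch of a possible all-in-one configuration built from the options documented above and the structure used in `unit_test.yaml`; the `name`, the `LFS` area, the alignment entry, the global tag, the IOV number and the event count are placeholders to be adapted to the actual campaign:
+
+```
+name: GenericVExample
+LFS: /eos/cms/store/group/alca_trackeralign/username/test
+alignments:
+    prompt:
+        title: Prompt alignment
+        globaltag: auto:run3_data_prompt
+validations:
+    Generic:
+        single:
+            TestSingle:
+                IOV:
+                    - 362616
+                alignments:
+                    - prompt
+                trackcollection: ALCARECOTkAlMinBias
+                triggerBits: HLT_*
+                maxevents: 10000
+        merge:
+            TestMerge:
+                singles:
+                    - TestSingle
+                customrighttitle: IOV
+```
+
+The single step runs `GenericV_cfg.py` through cmsRun for each alignment/IOV combination, while the merge step runs the `GenericVmerge` executable on the JSON configuration produced by the tool (as in the unit test above).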