diff --git a/doc/releases/changelog-dev.md b/doc/releases/changelog-dev.md index a60fbbba..4e9eb059 100644 --- a/doc/releases/changelog-dev.md +++ b/doc/releases/changelog-dev.md @@ -6,6 +6,14 @@ ## Bug fixes + * Fixed destructor in `RegionSelectionManager` so that `RegionSelection` + objects allocated inside the `region_vector` are properly destructed upon + exiting `scope/destruction` of `RegionSelectionManager`. + ([#113](https://github.com/MadAnalysis/madanalysis5/pull/113)) + + ## Contributors -This release contains contributions from (in alphabetical order): \ No newline at end of file +This release contains contributions from (in alphabetical order): + +[Kyle Fan](https://github.com/kfan326) diff --git a/madanalysis/IOinterface/job_reader.py b/madanalysis/IOinterface/job_reader.py index 30bb01e3..112e9f67 100644 --- a/madanalysis/IOinterface/job_reader.py +++ b/madanalysis/IOinterface/job_reader.py @@ -30,6 +30,12 @@ from madanalysis.layout.histogram import Histogram from madanalysis.layout.histogram_logx import HistogramLogX from madanalysis.layout.histogram_frequency import HistogramFrequency +from madanalysis.IOinterface.sqlite_reader import getMeanAndStdev +from madanalysis.IOinterface.sqlite_reader import DBreader_debug +from madanalysis.IOinterface.sqlite_reader import getHistoStatisticsAvg + + + import glob import logging import shutil @@ -318,10 +324,18 @@ def ExtractHistos(self,dataset,plot,merging=False): while(os.path.isdir(self.safdir+"/"+name+"/MergingPlots_"+str(i))): i+=1 filename = self.safdir+"/"+name+"/MergingPlots_"+str(i-1)+"/Histograms/histos.saf" + ##file path for sqlite db + sqlite_db_filename = self.safdir+"/"+name+"/MergingPlots_"+str(i-1)+"/Histograms/histo.db" + + + else: while(os.path.isdir(self.safdir+"/"+name+"/MadAnalysis5job_"+str(i))): i+=1 filename = self.safdir+"/"+name+"/MadAnalysis5job_"+str(i-1)+"/Histograms/histos.saf" + ##file path for sqlite db + sqlite_db_filename = 
self.safdir+"/"+name+"/MadAnalysis5job_"+str(i-1)+"/Histograms/histo.db" + # Opening the file try: @@ -348,6 +362,16 @@ def ExtractHistos(self,dataset,plot,merging=False): data_negative = [] labels = [] + + ## SqliteDB extractor + sqlite_exists = os.path.isfile(sqlite_db_filename) + if sqlite_exists: + sqlite_output_dictionary = getMeanAndStdev(sqlite_db_filename) + histoStatistics = getHistoStatisticsAvg(sqlite_db_filename) + #DBreader_debug(sqlite_output_dictionary) + + + # Loop over the lines numline=0 for line in file: @@ -391,13 +415,40 @@ def ExtractHistos(self,dataset,plot,merging=False): elif words[0].lower()=='': histoTag.activate() elif words[0].lower()=='': + + histoTag.desactivate() plot.histos.append(copy.copy(histoinfo)) - plot.histos[-1].positive.array = data_positive[:] - plot.histos[-1].negative.array = data_negative[:] - histoinfo.Reset() - data_positive = [] - data_negative = [] + + if not sqlite_exists: + + plot.histos[-1].positive.array = data_positive[:] + plot.histos[-1].negative.array = data_negative[:] + histoinfo.Reset() + bin_means = [] + bin_stdev = [] + else: + bin_means = [] + bin_stdev = [] + + # save bin mean and stdev into histogram_core for positive and negative values + for bin_index in sqlite_output_dictionary[histoinfo.name]: + if bin_index not in ["underflow", "overflow"]: + bin_means.append(sqlite_output_dictionary[histoinfo.name][bin_index][0]) + bin_stdev.append(sqlite_output_dictionary[histoinfo.name][bin_index][1]) + elif bin_index == "underflow": + plot.histos[-1].positive.underflow = sqlite_output_dictionary[histoinfo.name][bin_index][0] + elif bin_index == "overflow": + plot.histos[-1].positive.overflow = sqlite_output_dictionary[histoinfo.name][bin_index][0] + + histoinfo.Reset() + + plot.histos[-1].positive.array = bin_means[:] + ##plot.histos[-1].negative.array = bin_means[:] + plot.histos[-1].positive.stdev = bin_stdev[:] + ##plot.histos[-1].negative.stdev = bin_stdev[:] + + elif words[0].lower()=='': 
histoFreqTag.activate() elif words[0].lower()=='': @@ -415,11 +466,35 @@ def ExtractHistos(self,dataset,plot,merging=False): elif words[0].lower()=='': histoLogXTag.desactivate() plot.histos.append(copy.copy(histologxinfo)) - plot.histos[-1].positive.array = data_positive[:] - plot.histos[-1].negative.array = data_negative[:] - histologxinfo.Reset() - data_positive = [] - data_negative = [] + + if not sqlite_exists: + + plot.histos[-1].positive.array = data_positive[:] + plot.histos[-1].negative.array = data_negative[:] + histologxinfo.Reset() + data_positive = [] + data_negative = [] + + else: + bin_means = [] + bin_stdev = [] + + # save bin mean and stdev into histogram_core for positive and negative values + for bin_index in sqlite_output_dictionary[histologxinfo.name]: + if bin_index not in ["underflow", "overflow"]: + bin_means.append(sqlite_output_dictionary[histoinfo.name][bin_index][0]) + bin_stdev.append(sqlite_output_dictionary[histoinfo.name][bin_index][1]) + elif bin_index == "underflow": + plot.histos[-1].positive.underflow = sqlite_output_dictionary[histologxinfo.name][bin_index][0] + elif bin_index == "overflow": + plot.histos[-1].positive.overflow = sqlite_output_dictionary[histologxinfo.name][bin_index][0] + + histologxinfo.Reset() + + plot.histos[-1].positive.array = bin_means[:] + ##plot.histos[-1].negative.array = bin_means[:] + plot.histos[-1].positive.stdev = bin_stdev[:] + ##plot.histos[-1].negative.stdev = bin_stdev[:] # Looking from histogram description elif descriptionTag.activated: @@ -464,80 +539,143 @@ def ExtractHistos(self,dataset,plot,merging=False): # Looking from histogram statistics elif statisticsTag.activated and len(words)==2: + if statisticsTag.Nlines==0: results = self.ExtractStatisticsInt(words,numline,filename) + if histoTag.activated: - histoinfo.positive.nevents=results[0] - histoinfo.negative.nevents=results[1] + if sqlite_exists: + histoinfo.positive.nevents = int(histoStatistics[histoinfo.name][0]) + 
histoinfo.negative.nevents = int(histoStatistics[histoinfo.name][1]) + else : + histoinfo.positive.nevents=results[0] + histoinfo.negative.nevents=results[1] elif histoLogXTag.activated: - histologxinfo.positive.nevents=results[0] - histologxinfo.negative.nevents=results[1] + if sqlite_exists: + histologxinfo.positive.nevents = int(histoStatistics[histologxinfo.name][0]) + histologxinfo.negative.nevents = int(histoStatistics[histologxinfo.name][1]) + else : + histologxinfo.positive.nevents=results[0] + histologxinfo.negative.nevents=results[1] elif histoFreqTag.activated: histofreqinfo.positive.nevents=results[0] histofreqinfo.negative.nevents=results[1] elif statisticsTag.Nlines==1: results = self.ExtractStatisticsFloat(words,numline,filename) + if histoTag.activated: - histoinfo.positive.sumwentries=results[0] - histoinfo.negative.sumwentries=results[1] + if sqlite_exists: + histoinfo.positive.sumwentries = histoStatistics[histoinfo.name][2] + histoinfo.negative.sumwentries = histoStatistics[histoinfo.name][3] + else : + histoinfo.positive.sumwentries=results[0] + histoinfo.negative.sumwentries=results[1] elif histoLogXTag.activated: - histologxinfo.positive.sumwentries=results[0] - histologxinfo.negative.sumwentries=results[1] + if sqlite_exists: + histologxinfo.positive.sumwentries = histoStatistics[histologxinfo.name][2] + histologxinfo.negative.sumwentries = histoStatistics[histologxinfo.name][3] + else : + histologxinfo.positive.sumwentries=results[0] + histologxinfo.negative.sumwentries=results[1] elif histoFreqTag.activated: histofreqinfo.positive.sumwentries=results[0] histofreqinfo.negative.sumwentries=results[1] elif statisticsTag.Nlines==2: results = self.ExtractStatisticsInt(words,numline,filename) + if histoTag.activated: - histoinfo.positive.nentries=results[0] - histoinfo.negative.nentries=results[1] + if sqlite_exists: + histoinfo.positive.nentries = int(histoStatistics[histoinfo.name][4]) + histoinfo.negative.nentries = 
int(histoStatistics[histoinfo.name][5]) + else : + histoinfo.positive.nentries=results[0] + histoinfo.negative.nentries=results[1] elif histoLogXTag.activated: - histologxinfo.positive.nentries=results[0] - histologxinfo.negative.nentries=results[1] + if sqlite_exists: + histologxinfo.positive.nentries = int(histoStatistics[histologxinfo.name][4]) + histologxinfo.negative.nentries = int(histoStatistics[histologxinfo.name][5]) + else : + histologxinfo.positive.nentries=results[0] + histologxinfo.negative.nentries=results[1] elif histoFreqTag.activated: histofreqinfo.positive.nentries=results[0] histofreqinfo.negative.nentries=results[1] elif statisticsTag.Nlines==3: results = self.ExtractStatisticsFloat(words,numline,filename) + if histoTag.activated: - histoinfo.positive.sumw=results[0] - histoinfo.negative.sumw=results[1] + if sqlite_exists: + histoinfo.positive.sumw = histoStatistics[histoinfo.name][6] + histoinfo.negative.sumw = histoStatistics[histoinfo.name][7] + else : + histoinfo.positive.sumw=results[0] + histoinfo.negative.sumw=results[1] elif histoLogXTag.activated: - histologxinfo.positive.sumw=results[0] - histologxinfo.negative.sumw=results[1] + if sqlite_exists: + histologxinfo.positive.sumw = histoStatistics[histologxinfo.name][6] + histologxinfo.negative.sumw = histoStatistics[histologxinfo.name][7] + else : + histologxinfo.positive.sumw=results[0] + histologxinfo.negative.sumw=results[1] elif histoFreqTag.activated: histofreqinfo.positive.sumw=results[0] histofreqinfo.negative.sumw=results[1] elif statisticsTag.Nlines==4 and not histoFreqTag.activated: results = self.ExtractStatisticsFloat(words,numline,filename) + if histoTag.activated: - histoinfo.positive.sumw2=results[0] - histoinfo.negative.sumw2=results[1] + if sqlite_exists: + histoinfo.positive.sumw2 = histoStatistics[histoinfo.name][8] + histoinfo.negative.sumw2 = histoStatistics[histoinfo.name][9] + else : + histoinfo.positive.sumw2=results[0] + histoinfo.negative.sumw2=results[1] elif 
histoLogXTag.activated: - histologxinfo.positive.sumw2=results[0] - histologxinfo.negative.sumw2=results[1] - + if sqlite_exists: + histologxinfo.positive.sumw2 = histoStatistics[histologxinfo.name][8] + histologxinfo.negative.sumw2 = histoStatistics[histologxinfo.name][9] + else : + histologxinfo.positive.sumw2=results[0] + histologxinfo.negative.sumw2=results[1] elif statisticsTag.Nlines==5 and not histoFreqTag.activated: results = self.ExtractStatisticsFloat(words,numline,filename) + if histoTag.activated: - histoinfo.positive.sumwx=results[0] - histoinfo.negative.sumwx=results[1] + if sqlite_exists: + histoinfo.positive.sumwx = histoStatistics[histoinfo.name][10] + histoinfo.negative.sumwx = histoStatistics[histoinfo.name][11] + else : + histoinfo.positive.sumwx=results[0] + histoinfo.negative.sumwx=results[1] elif histoLogXTag.activated: - histologxinfo.positive.sumwx=results[0] - histologxinfo.negative.sumwx=results[1] + if sqlite_exists: + histologxinfo.positive.sumwx = histoStatistics[histologxinfo.name][10] + histologxinfo.negative.sumwx = histoStatistics[histologxinfo.name][11] + else : + histologxinfo.positive.sumwx=results[0] + histologxinfo.negative.sumwx=results[1] elif statisticsTag.Nlines==6 and not histoFreqTag.activated: results = self.ExtractStatisticsFloat(words,numline,filename) + if histoTag.activated: - histoinfo.positive.sumw2x=results[0] - histoinfo.negative.sumw2x=results[1] + if sqlite_exists: + histoinfo.positive.sumw2x = histoStatistics[histoinfo.name][12] + histoinfo.negative.sumw2x = histoStatistics[histoinfo.name][13] + else : + histoinfo.positive.sumw2x=results[0] + histoinfo.negative.sumw2x=results[1] elif histoLogXTag.activated: - histologxinfo.positive.sumw2x=results[0] - histologxinfo.negative.sumw2x=results[1] + if sqlite_exists: + histologxinfo.positive.sumw2x = histoStatistics[histologxinfo.name][12] + histologxinfo.negative.sumw2x = histoStatistics[histologxinfo.name][13] + else : + histologxinfo.positive.sumw2x=results[0] + 
histologxinfo.negative.sumw2x=results[1] else: logging.getLogger('MA5').warning('Extra line is found: '+line) diff --git a/madanalysis/IOinterface/job_writer.py b/madanalysis/IOinterface/job_writer.py index 07941a7d..cb72197c 100644 --- a/madanalysis/IOinterface/job_writer.py +++ b/madanalysis/IOinterface/job_writer.py @@ -750,6 +750,7 @@ def WriteMakefiles(self, option="", **kwargs): options.has_root_inc = self.main.archi_info.has_root options.has_root_lib = self.main.archi_info.has_root + options.has_sqlite = self.main.archi_info.has_sqlite3 #options.has_userpackage = True toRemove=['Log/compilation.log','Log/linking.log','Log/cleanup.log','Log/mrproper.log'] diff --git a/madanalysis/IOinterface/library_writer.py b/madanalysis/IOinterface/library_writer.py index 39812f29..bdc74271 100644 --- a/madanalysis/IOinterface/library_writer.py +++ b/madanalysis/IOinterface/library_writer.py @@ -131,7 +131,7 @@ def WriteMakefileForInterfaces(self,package): filename = self.path+"/SampleAnalyzer/Test/Makefile_delphesMA5tune" elif package=='test_root': filename = self.path+"/SampleAnalyzer/Test/Makefile_root" - + # Header title='' if package=='commons': @@ -239,10 +239,13 @@ def WriteMakefileForInterfaces(self,package): options.ma5_fastjet_mode = self.main.archi_info.has_fastjet options.has_fastjet_inc = self.main.archi_info.has_fastjet options.has_fastjet_lib = self.main.archi_info.has_fastjet + #options.has_sqlite_lib = self.main.archi_info.has_sqlite3 + options.has_sqlite_tag = self.main.archi_info.has_sqlite3 # options.has_fastjet_ma5lib = self.main.archi_info.has_fastjet toRemove.extend(['compilation.log','linking.log','cleanup.log','mrproper.log']) elif package=='test_commons': options.has_commons = True + options.has_sqlite_tag = self.main.archi_info.has_sqlite3 toRemove.extend(['compilation_commons.log','linking_commons.log','cleanup_commons.log','mrproper_commons.log','../Bin/TestCommons.log']) elif package=='zlib': options.has_commons = True @@ -252,6 +255,7 @@ def 
WriteMakefileForInterfaces(self,package): elif package=='test_zlib': options.has_commons = True options.has_zlib_ma5lib = True + options.has_sqlite_tag = self.main.archi_info.has_sqlite3 # options.has_zlib_lib = True toRemove.extend(['compilation_zlib.log','linking_zlib.log','cleanup_zlib.log','mrproper_zlib.log','../Bin/TestZlib.log']) elif package=='delphes': @@ -324,6 +328,8 @@ def WriteMakefileForInterfaces(self,package): options.has_fastjet_lib = self.main.archi_info.has_fastjet options.ma5_fastjet_mode = self.main.archi_info.has_fastjet options.has_substructure = self.main.archi_info.has_fjcontrib and self.main.archi_info.has_fastjet + options.has_sqlite_tag = self.main.archi_info.has_sqlite3 + options.has_sqlite_lib = self.main.archi_info.has_sqlite3 toRemove.extend(['compilation.log','linking.log','cleanup.log','mrproper.log']) elif package=='test_process': @@ -342,6 +348,8 @@ def WriteMakefileForInterfaces(self,package): # options.has_delphesMA5tune_tag = self.main.archi_info.has_delphesMA5tune # options.has_zlib_tag = self.main.archi_info.has_zlib toRemove.extend(['compilation_process.log','linking_process.log','cleanup_process.log','mrproper_process.log','../Bin/TestSampleAnalyzer.log']) + elif package=='sqlite': + options.has_sqlite = self.main.archi_info.has_sqlite3 # file pattern if package in ['commons','process','configuration']: @@ -373,7 +381,7 @@ def WriteMakefileForInterfaces(self,package): hfiles = ['DelphesMA5tune/*.h'] elif package=='test_root': cppfiles = ['Root/*.cpp'] - hfiles = ['Root/*.h'] + hfiles = ['Root/*.h'] else: cppfiles = [package+'/*.cpp'] hfiles = [package+'/*.h'] diff --git a/madanalysis/IOinterface/sqlite_reader.py b/madanalysis/IOinterface/sqlite_reader.py new file mode 100644 index 00000000..60ff5bb4 --- /dev/null +++ b/madanalysis/IOinterface/sqlite_reader.py @@ -0,0 +1,193 @@ +import sqlite3 +from matplotlib import pyplot as plt +import numpy as np +import math +import statistics + + +def getMeanAndStdevOld(path): + + con 
= sqlite3.connect(path) + cursor = con.cursor() + + bin_data = cursor.execute("select * from data;").fetchall() + + pos_bins = dict() + neg_bins = dict() + + ## bin_data has all data for the histogram, need to get mean and standard deviation for each bin + ## each row of the query is a tuple of 5 elements [histo name, weight id, bin #, positive value, negative value] + ## sort them into +bin/-bin[name] -> bin # -> [mean, standard deviation] + + for row in bin_data: + ## if the histo name is not inside the bin dictionaries, create a new dictionary for each of +/- bin dictionary + ## append values to +/-bin[name][bin#] + + if row[0] not in pos_bins or row[0] not in neg_bins: + pos_bins[row[0]] = dict() + neg_bins[row[0]] = dict() + pos_bins[row[0]][row[2]] = [float(row[3])] + neg_bins[row[0]][row[2]] = [float(row[4])] + + else: + if row[2] in pos_bins[row[0]] or row[2] in neg_bins[row[0]]: + pos_bins[row[0]][row[2]].append(float(row[3])) + neg_bins[row[0]][row[2]].append(float(row[4])) + else : + pos_bins[row[0]][row[2]] = [float(row[3])] + neg_bins[row[0]][row[2]] = [float(row[4])] + + output = dict() + + for histo_name in pos_bins: + output[histo_name] = dict() + for bin_i in pos_bins[histo_name]: + output[histo_name][bin_i] = [statistics.mean(pos_bins[histo_name][bin_i]), statistics.stdev(pos_bins[histo_name][bin_i])] + + for histo_name in neg_bins: + for bin_i in neg_bins[histo_name]: + output[histo_name][bin_i].extend([statistics.mean(neg_bins[histo_name][bin_i]), statistics.stdev(neg_bins[histo_name][bin_i])]) + + return output + + +def getStatistics(stats): + histoname_dict = dict() + for entry in stats: + if entry[0] not in histoname_dict: + histoname_dict[entry[0]] = dict() + sumw = float(entry[2]) - abs(float(entry[3])) + if sumw == 0: + raise Exception("sumw is 0 for histo " + entry[0] + " and weight id " + entry[1]) + histoname_dict[entry[0]][entry[1]] = sumw + return histoname_dict + + +def getMeanAndStdev(path): + + con = sqlite3.connect(path) + cursor 
= con.cursor() + bin_data = cursor.execute("select * from data;").fetchall() + stats_data = cursor.execute("select name, id, pos_sum_event_weights_over_events, neg_sum_event_weights_over_events from Statistics").fetchall() + + statsdict = getStatistics(stats_data) + + + ## parse data in the form of parsed_data[histo_name][bin #][{positive value, negative value}] + parsed_data = dict() + for row in bin_data: + + histo_name = row[0] + weight_id = row[1] + bin_number = row[2] + sumw = statsdict[histo_name][str(weight_id)] + + value = (float(row[3]) - abs(float(row[4]))) /sumw + if histo_name not in parsed_data: + ## if histo name is not in the parsed_data dictionary, then create a new bin dictionary for that histo, then for the bin, create a weigh id dictionary + parsed_data[histo_name] = dict() + parsed_data[histo_name][bin_number] = [] + + else: + ## since histo name is in the parsed_data dictionary, we need to check if the bin in the dictioary, if not then create a weight id dictionary for that bin + if bin_number not in parsed_data[histo_name]: + parsed_data[histo_name][bin_number] = [] + + parsed_data[histo_name][bin_number].append(value) + + output = dict() + for histo_name in parsed_data: + output[histo_name] = dict() + for bin_number in parsed_data[histo_name]: + output[histo_name][bin_number] = [statistics.mean(parsed_data[histo_name][bin_number]), statistics.stdev(parsed_data[histo_name][bin_number])] + + return output + +def getHistoStatisticsAvg(path): + + con = sqlite3.connect(path) + cursor = con.cursor() + + + statistics = cursor.execute("select name, avg(pos_num_events), avg(neg_num_events), avg(pos_sum_event_weights_over_events), avg(neg_sum_event_weights_over_events), avg(pos_entries), avg(neg_entries), avg(pos_sum_event_weights_over_entries), avg(neg_sum_event_weights_over_entries), avg(pos_sum_squared_weights), avg(neg_sum_squared_weights), avg(pos_value_times_weight), avg(neg_value_times_weight), avg(pos_value_squared_times_weight), 
avg(neg_value_squared_times_weight) from Statistics group by name;").fetchall() + + statdict = dict() + for i in range(len(statistics)): + statdict[statistics[i][0]] = statistics[i][1:] + + return statdict; + + + + + + +## debug for printing out output dictionary +## structure is as follows: +## output[histogram_name][bin #] = [positive mean, positive stdev, negative mean, negative stddev] + + +def DBreader_debug(output): + + for name in output: + print(name) + for eachbin in output[name]: + print(eachbin) + for val in output[name][eachbin]: + print(val) + + + for histo in output: + num_of_keys = len(output[histo].keys()) + labels = [None] * num_of_keys + for i in range(1,num_of_keys): + labels[i] = i + labels[0] = 'underflow' + labels[num_of_keys-1] = 'overflow' + positives = [None] * num_of_keys + negatives = [None] * num_of_keys + for row in output[histo]: + if(row == 'underflow'): + positives[0] = output[histo][row][0] + negatives[0] = output[histo][row][2] + elif(row == 'overflow'): + positives[num_of_keys-1] = output[histo][row][0] + negatives[num_of_keys-1] = output[histo][row][2] + else: + positives[int(row)] = output[histo][row][0] + negatives[int(row)] = output[histo][row][2] + #for lable in lables: + # print(lable) + #for val in positives: + # print(val) + #for val in negatives: + # print(val) + x = np.arange(num_of_keys) + width = 0.5 + fig, ax = plt.subplots() + rects1 = ax.bar(x - width/3, positives, width, label="positives avg") + rects2 = ax.bar(x + width/3, negatives, width, label="negatives avg") + + ax.set_ylabel('Events Luminosity = ') + ax.set_title(histo) + ax.set_xticks(x, labels, rotation = 65) + ax.legend() + + #ax.bar_label(rects1, padding=3) + #ax.bar_label(rects2, padding=3) + + fig.tight_layout() + plt.show() + + + + + + + + + + + + + diff --git a/madanalysis/build/makefile_writer.py b/madanalysis/build/makefile_writer.py index f3ff61f4..208b3954 100644 --- a/madanalysis/build/makefile_writer.py +++ 
b/madanalysis/build/makefile_writer.py @@ -41,6 +41,7 @@ def __init__(self): self.has_fastjet = False self.has_delphes = False self.has_delphesMA5tune = False + self.has_sqlite3 = False @staticmethod @@ -98,7 +99,10 @@ def UserfriendlyMakefileForSampleAnalyzer(filename,options): file.write('\tcd Test && $(MAKE) -f Makefile_delphesMA5tune\n') if options.has_process: file.write('\tcd Process && $(MAKE) -f Makefile\n') - file.write('\tcd Test && $(MAKE) -f Makefile_process\n') + file.write('\tcd Test && $(MAKE) -f Makefile_process\n') + if options.has_sqlite3: + file.write('\tcd Interfaces && $(MAKE) -f Makefile_sqlite\n') + file.write('\tcd Test && $(MAKE) -f Makefile_sqlite\n') file.write('\n') # Clean @@ -125,6 +129,9 @@ def UserfriendlyMakefileForSampleAnalyzer(filename,options): if options.has_process: file.write('\tcd Process && $(MAKE) -f Makefile clean\n') file.write('\tcd Test && $(MAKE) -f Makefile_process clean\n') + if options.has_sqlite3: + file.write('\tcd Interfaces && $(MAKE) -f Makefile_sqlite clean\n') + file.write('\tcd Test && $(MAKE) -f Makefile_sqlite clean\n') file.write('\n') # Mrproper @@ -152,6 +159,9 @@ def UserfriendlyMakefileForSampleAnalyzer(filename,options): if options.has_process: file.write('\tcd Process && $(MAKE) -f Makefile mrproper\n') file.write('\tcd Test && $(MAKE) -f Makefile_process mrproper\n') + if options.has_sqlite3: + file.write('\tcd Interfaces && $(MAKE) -f Makefile_sqlite mrproper\n') + file.write('\tcd Test && $(MAKE) -f Makefile_sqlite mrproper\n') file.write('\n') # Closing the file @@ -194,6 +204,9 @@ def __init__(self): self.has_root_tag = False self.has_root_lib = False self.has_root_ma5lib = False + self.has_sqlite = False + self.has_sqlite_tag = False + self.has_sqlite_lib = False @@ -321,7 +334,9 @@ def Makefile( for header in archi_info.delphesMA5tune_inc_paths: cxxflags.extend(['-I'+header]) file.write('CXXFLAGS += '+' '.join(cxxflags)+'\n') - + + + # - tags cxxflags=[] if options.has_root_tag: @@ -338,6 
+353,8 @@ def Makefile( cxxflags.extend(['-DDELPHES_USE']) if options.has_delphesMA5tune_tag: cxxflags.extend(['-DDELPHESMA5TUNE_USE']) + if options.has_sqlite_tag: + cxxflags.extend(['-DSQLITE3_USE']) if len(cxxflags)!=0: file.write('CXXFLAGS += '+' '.join(cxxflags)+'\n') file.write('\n') @@ -347,7 +364,9 @@ def Makefile( # - general libs=[] - file.write('LIBFLAGS = \n') + + # added SQL + #file.write('LIBFLAGS = -l sqlite3\n') # - commons if options.has_commons: @@ -429,6 +448,14 @@ def Makefile( if options.has_heptoptagger: file.write('LIBFLAGS += -lHEPTopTagger_for_ma5\n') + # SQLite3 + if options.has_sqlite: + file.write('LIBFLAGS += -l sqlite3\n') + + if options.has_sqlite_lib: + file.write('LIBFLAGS += -l sqlite_for_ma5\n') + + # - Commons if options.has_commons: libs=[] @@ -464,6 +491,8 @@ def Makefile( libs.append('$(MA5_BASE)/tools/SampleAnalyzer/Lib/libsubstructure_for_ma5.so') if options.has_heptoptagger: libs.append('$(MA5_BASE)/tools/SampleAnalyzer/Lib/libHEPTopTagger_for_ma5.so') + if options.has_sqlite_lib: + libs.append('$(MA5_BASE)/tools/SampleAnalyzer/Lib/libsqlite_for_ma5.so') if len(libs)!=0: file.write('# Requirements to check before building\n') for ind in range(0,len(libs)): diff --git a/madanalysis/core/library_builder.py b/madanalysis/core/library_builder.py index cdb2c685..d6f476f0 100644 --- a/madanalysis/core/library_builder.py +++ b/madanalysis/core/library_builder.py @@ -80,6 +80,10 @@ def checkMA5(self): libraries.append(self.archi_info.ma5dir+'/tools/SampleAnalyzer/Lib/libdelphes_for_ma5.so') if self.archi_info.has_delphesMA5tune: libraries.append(self.archi_info.ma5dir+'/tools/SampleAnalyzer/Lib/libdelphesMA5tune_for_ma5.so') + if self.archi_info.has_sqlite3: + libraries.append(self.archi_info.ma5dir+'/tools/SampleAnalyzer/Lib/libsqlite_for_ma5.so') + + for library in libraries: if not os.path.isfile(library): self.logger.debug('\t-> library '+ library + " not found.") diff --git a/madanalysis/core/main.py b/madanalysis/core/main.py 
index 570b539a..85ec8876 100644 --- a/madanalysis/core/main.py +++ b/madanalysis/core/main.py @@ -626,6 +626,13 @@ def BuildLibrary(self,forced=False): self.archi_info.ma5dir + '/tools/SampleAnalyzer/Bin/TestRoot', self.archi_info.ma5dir + '/tools/SampleAnalyzer/Test/', True]) + # SQLite3 + if self.archi_info.has_sqlite3: + libraries.append(['SQLite3', 'interface to SQLite3', 'sqlite', + self.archi_info.ma5dir + '/tools/SampleAnalyzer/Lib/libsqlite_for_ma5.so', + self.archi_info.ma5dir + '/tools/SampleAnalyzer/Interfaces', False]) + + # Process libraries.append(['process', 'SampleAnalyzer core', 'process', self.archi_info.ma5dir + '/tools/SampleAnalyzer/Lib/libprocess_for_ma5.so', @@ -634,6 +641,7 @@ def BuildLibrary(self,forced=False): self.archi_info.ma5dir + '/tools/SampleAnalyzer/Bin/TestSampleAnalyzer', self.archi_info.ma5dir + '/tools/SampleAnalyzer/Test/', True]) + # Writing the Makefiles self.logger.info("") @@ -659,7 +667,8 @@ def BuildLibrary(self,forced=False): options.has_zlib = self.archi_info.has_zlib options.has_fastjet = self.archi_info.has_fastjet options.has_delphes = self.archi_info.has_delphes - options.has_delphesMA5tune = self.archi_info.has_delphesMA5tune + options.has_delphesMA5tune = self.archi_info.has_delphesMA5tune + options.has_sqlite3 = self.archi_info.has_sqlite3 #MakefileWriter.UserfriendlyMakefileForSampleAnalyzer(self.archi_info.ma5dir+'/tools/SampleAnalyzer/Makefile',options) # Writing the setup diff --git a/madanalysis/job/job_execute.py b/madanalysis/job/job_execute.py index 6380d211..9137612b 100644 --- a/madanalysis/job/job_execute.py +++ b/madanalysis/job/job_execute.py @@ -43,7 +43,7 @@ def WriteExecute(file,main,part_list): file.write(' if (weighted_events_ && event.mc()!=0) ' +\ '__event_weight__ = event.mc()->weight();\n\n') file.write(' if (sample.mc()!=0) sample.mc()->addWeightedEvents(__event_weight__);\n') - file.write(' Manager()->InitializeForNewEvent(__event_weight__);\n') + file.write(' 
Manager()->InitializeForNewEvent(__event_weight__, event.mc()->multiweights().GetWeights());\n') file.write('\n') # Reseting instance name diff --git a/madanalysis/layout/histogram.py b/madanalysis/layout/histogram.py index 949eaf8b..0c4992b5 100644 --- a/madanalysis/layout/histogram.py +++ b/madanalysis/layout/histogram.py @@ -85,7 +85,7 @@ def FinalizeReading(self,main,dataset): # Data data = [] for i in range(0,len(self.positive.array)): - data.append(self.positive.array[i]-self.negative.array[i]) + data.append(self.positive.array[i]) if data[-1]<0: self.warnings.append(\ 'dataset='+dataset.name+\ @@ -95,6 +95,14 @@ def FinalizeReading(self,main,dataset): data[-1]=0 self.summary.array = data[:] # [:] -> clone of data + #stdev + stdev_data = [] + for i in range(0, len(self.positive.array)): + stdev_data.append(max(0, self.positive.stdev[i])) + self.summary.stdev = stdev_data[:] + + + # Integral self.positive.ComputeIntegral() self.negative.ComputeIntegral() diff --git a/madanalysis/layout/histogram_core.py b/madanalysis/layout/histogram_core.py index 0a3355a4..bb566599 100644 --- a/madanalysis/layout/histogram_core.py +++ b/madanalysis/layout/histogram_core.py @@ -50,6 +50,7 @@ def __init__(self): self.nan = 0. self.inf = 0. self.array = [] + self.stdev = [] def ComputeIntegral(self): diff --git a/madanalysis/layout/histogram_frequency_core.py b/madanalysis/layout/histogram_frequency_core.py index d797d74a..377fa93e 100644 --- a/madanalysis/layout/histogram_frequency_core.py +++ b/madanalysis/layout/histogram_frequency_core.py @@ -35,6 +35,7 @@ def __init__(self): self.overflow = 0. self.underflow = 0. 
self.array = [] + self.stdev = [] def ComputeIntegral(self): self.integral = 0 diff --git a/madanalysis/layout/histogram_logx.py b/madanalysis/layout/histogram_logx.py index bb16edbf..386be6f5 100644 --- a/madanalysis/layout/histogram_logx.py +++ b/madanalysis/layout/histogram_logx.py @@ -111,6 +111,8 @@ def FinalizeReading(self,main,dataset): data[-1]=0 self.summary.array = data[:] # [:] -> clone of data + + # Integral self.positive.ComputeIntegral() self.negative.ComputeIntegral() diff --git a/madanalysis/layout/plotflow.py b/madanalysis/layout/plotflow.py index 8fe99d39..1535b443 100644 --- a/madanalysis/layout/plotflow.py +++ b/madanalysis/layout/plotflow.py @@ -640,8 +640,17 @@ def DrawMATPLOTLIB(self,histos,scales,ref,irelhisto,filenamePy,outputnames): outputPy.write(str(histos[ind].summary.array[bin-1]*scales[ind])) outputPy.write('])\n\n') + #stdev + for ind in range(0, len(histos)): + outputPy.write(' # Creating array for stdev\n') + outputPy.write(' err = numpy.array([') + for bin in range(1, xnbin+1): + if bin !=1: + outputPy.write(',') + outputPy.write(str(histos[ind].summary.stdev[bin-1]*scales[ind])) + outputPy.write('])\n\n') - + # Canvas outputPy.write(' # Creating a new Canvas\n') dpi=80 @@ -795,7 +804,7 @@ def DrawMATPLOTLIB(self,histos,scales,ref,irelhisto,filenamePy,outputnames): mylinewidth = self.main.datasets[ind].linewidth mylinestyle = LineStyleType.convert2matplotlib(self.main.datasets[ind].linestyle) - outputPy.write(' pad.hist('+\ + outputPy.write(' n, _, _ = pad.hist('+\ 'x=xData, '+\ 'bins=xBinning, '+\ 'weights='+myweights+',\\\n'+\ @@ -824,6 +833,10 @@ def DrawMATPLOTLIB(self,histos,scales,ref,irelhisto,filenamePy,outputnames): ' orientation="vertical")\n\n') outputPy.write('\n') + + # error bar + outputPy.write(' plt.errorbar(xData, n, yerr = err, fmt=\'r.\')\n') + # Label outputPy.write(' # Axis\n') outputPy.write(" plt.rc('text',usetex=False)\n") diff --git a/madanalysis/system/architecture_info.py 
b/madanalysis/system/architecture_info.py index 53b363b8..640a82e3 100644 --- a/madanalysis/system/architecture_info.py +++ b/madanalysis/system/architecture_info.py @@ -47,6 +47,10 @@ def __init__(self): self.has_delphes = False self.has_delphesMA5tune = False + # Flag for SQlite3 + self.has_sqlite3 = False + + # Library to put before all the others? self.root_priority = False self.zlib_priority = False @@ -98,6 +102,7 @@ def __init__(self): self.delphesMA5tune_lib="" self.fastjet_bin_path="" self.fastjet_lib_paths=[] + # C++ compiler versions self.cpp11 = False diff --git a/madanalysis/system/checkup.py b/madanalysis/system/checkup.py index 4629f6f7..1c788c18 100644 --- a/madanalysis/system/checkup.py +++ b/madanalysis/system/checkup.py @@ -328,6 +328,8 @@ def CheckOptionalProcessingPackages(self): return False if not self.checker.Execute('root'): return False + if not self.checker.Execute('sqlite'): + return False self.archi_info.has_delphes = checker2.checkDelphes() self.archi_info.has_delphesMA5tune = checker2.checkDelphesMA5tune() diff --git a/madanalysis/system/detect_manager.py b/madanalysis/system/detect_manager.py index 318399bf..aadcf1e4 100644 --- a/madanalysis/system/detect_manager.py +++ b/madanalysis/system/detect_manager.py @@ -126,6 +126,9 @@ def Execute(self, rawpackage): elif package=='latex': from madanalysis.system.detect_latex import DetectLatex checker=DetectLatex(self.archi_info, self.user_info, self.session_info, self.debug) + elif package=='sqlite': + from madanalysis.system.detect_sqlite import DetectSQLite + checker=DetectSQLite(self.archi_info, self.user_info, self.session_info, self.debug) else: self.logger.error('the package "'+rawpackage+'" is unknown') return False diff --git a/madanalysis/system/detect_sqlite.py b/madanalysis/system/detect_sqlite.py new file mode 100644 index 00000000..d74e85a6 --- /dev/null +++ b/madanalysis/system/detect_sqlite.py @@ -0,0 +1,96 @@ 
+################################################################################ +# +# Copyright (C) 2012-2022 Jack Araz, Eric Conte & Benjamin Fuks +# The MadAnalysis development team, email: +# +# This file is part of MadAnalysis 5. +# Official website: +# +# MadAnalysis 5 is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# MadAnalysis 5 is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with MadAnalysis 5. If not, see +# +################################################################################ + + +from __future__ import absolute_import +import logging +import glob +import os +import sys +import re +import platform +from shell_command import ShellCommand +from madanalysis.enumeration.detect_status_type import DetectStatusType + + +class DetectSQLite: + + def __init__(self, archi_info, user_info, session_info, debug): + # mandatory options + self.archi_info = archi_info + self.user_info = user_info + self.session_info = session_info + self.debug = debug + self.name = 'SQLite3' + self.mandatory = False + self.log = [] + self.logger = logging.getLogger('MA5') + + # adding what you want here + self.headernames=['DatabaseManager.h'] + + def PrintDisableMessage(self): + self.logger.warning("sqlite3 not detected, multiweight output format will not be supported!") + + + def IsItVetoed(self): + if self.user_info.sqlite_veto: + self.logger.debug("user setting: veto on SQLite3") + return True + else: + self.logger.debug("no user veto") + return False + + + def AutoDetection(self): + # Which + result = 
ShellCommand.Which('sqlite3',all=False,mute=True) + if len(result)==0: + return DetectStatusType.UNFOUND,'' + if self.debug: + self.logger.debug(" which: " + str(result[0])) + + # Ok + return DetectStatusType.FOUND,'' + + + def ExtractInfo(self): + # Which all + if self.debug: + result = ShellCommand.Which('sqlite3',all=True,mute=True) + if len(result)==0: + return False + self.logger.debug(" which-all: ") + for file in result: + self.logger.debug(" - "+str(file)) + + # Ok + return True + + + def SaveInfo(self): + self.archi_info.has_sqlite3 = True + return True + + + diff --git a/madanalysis/system/user_info.py b/madanalysis/system/user_info.py index b13da86d..5d985040 100644 --- a/madanalysis/system/user_info.py +++ b/madanalysis/system/user_info.py @@ -88,6 +88,8 @@ def __init__(self): # dvipdf self.dvipdf_veto = None + self.sqlite_veto = None + # logger self.logger = logging.getLogger('MA5') diff --git a/requirements.txt b/requirements.txt index 352d7e20..f7820720 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,3 +3,4 @@ scipy>=1.7.1 numpy>=1.19.5 pyhf==0.6.3 lxml>=4.6.2 + diff --git a/tools/SampleAnalyzer/Commons/Base/DatabaseManager.h b/tools/SampleAnalyzer/Commons/Base/DatabaseManager.h new file mode 100644 index 00000000..b48076d3 --- /dev/null +++ b/tools/SampleAnalyzer/Commons/Base/DatabaseManager.h @@ -0,0 +1,52 @@ +#pragma once + + +#include + +class SQLiteBase; + +class DatabaseManager { + private: + + SQLiteBase *manager; + + public: + DatabaseManager(std::string path); + + ~DatabaseManager(); + + void createCutflowTables(); + + void createHistoTables(); + + void createWeightNamesTable(); + + void addWeightDefinition(int id, std::string def); + + void addHisto(std::string name, int bins, double xmin, double xmax, std::string regions); + + void addStatistic(std::string name, + int id, + int pos_events, + int neg_events, + double pos_sum_events, + double neg_sum_events, + int pos_entries, + int neg_entries, + double pos_sum_entries, + double 
neg_sum_entries, + double pos_sum_squared, + double neg_sum_squared, + double pos_val_weight, + double neg_val_weight, + double pos_val2_weight, + double neg_val2_weight); + + void addData(std::string name, int id, std::string bin, double positive, double negative); + + void addCut(std::string r_name, std::string c_name); + + void addWeight(std::string r_name, std::string c_name, int id, int pos, int neg, double pos_sum, double neg_sum, double pos_2sum, double neg_2sum); + + void closeDB(); +}; diff --git a/tools/SampleAnalyzer/Commons/Base/OutputManager.h b/tools/SampleAnalyzer/Commons/Base/OutputManager.h new file mode 100644 index 00000000..ce288f4e --- /dev/null +++ b/tools/SampleAnalyzer/Commons/Base/OutputManager.h @@ -0,0 +1,66 @@ +#pragma once + +#include +#include +#include + + +#include "SampleAnalyzer/Process/Analyzer/AnalyzerBase.h" +#include "SampleAnalyzer/Commons/DataFormat/SampleFormat.h" + +//include all output type interfaces here +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" + + +using namespace std; +using namespace MA5; + + + +//output manager object initializer takes in a AnalyzerBase pointer and a vector of arrays of size 3 +//each array constains [output type name, path to histogram file, path to cutflow file] +class OutputManager { + private: + //base pointer type OutputBase can take on any output interface, it's only purpose is to provide + //and execute function, new output formats require implementation of the output interface + vector > output_types_; + AnalyzerBase *analyzer_; + SampleFormat *samples_; + + public: + OutputManager(vector > output_types, AnalyzerBase *analyzer, SampleFormat *samples) : output_types_(output_types), analyzer_(analyzer), samples_(samples) {} + //for each output type, get output object from factory and call execute on it's interface + void Execute() { + + //implement each individual output type here + for(int i = 0; i < output_types_.size(); ++i){ + + //implementaton for SQLite + 
if(output_types_[i][0] == "sqlite"){ + DatabaseManager cutflow(output_types_[i][1]); + DatabaseManager histogram(output_types_[i][2]); + + histogram.createHistoTables(); + analyzer_->Manager()->GetPlotManager()->WriteSQL(histogram); + samples_->mc()->WriteWeightNames(histogram); + histogram.closeDB(); + + cutflow.createCutflowTables(); + bool addInitial = true; + samples_->mc()->WriteWeightNames(cutflow); + for(int i = 0; i < analyzer_->Manager()->Regions().size(); ++i){ + analyzer_->Manager()->Regions()[i]->WriteSQL(cutflow, addInitial); + + } + cutflow.closeDB(); + + } + } + } +}; + + + + + + diff --git a/tools/SampleAnalyzer/Commons/DataFormat/MCSampleFormat.h b/tools/SampleAnalyzer/Commons/DataFormat/MCSampleFormat.h index 9a09bd05..67b17881 100644 --- a/tools/SampleAnalyzer/Commons/DataFormat/MCSampleFormat.h +++ b/tools/SampleAnalyzer/Commons/DataFormat/MCSampleFormat.h @@ -36,7 +36,9 @@ #include "SampleAnalyzer/Commons/DataFormat/GeneratorInfo.h" #include "SampleAnalyzer/Commons/DataFormat/MCProcessFormat.h" #include "SampleAnalyzer/Commons/DataFormat/WeightDefinition.h" -#include "SampleAnalyzer/Commons/Service/LogService.h" +#include "SampleAnalyzer/Commons/Service/LogService.h" + +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" namespace MA5 @@ -80,6 +82,8 @@ class MCSampleFormat // ----------------------- multiweights ------------------------ WeightDefinition weight_definition_; + std::map weight_names; + // ----------------------- file info --------------------------- MAfloat64 xsection_; @@ -125,6 +129,19 @@ class MCSampleFormat sumweight_negative_ = 0.; } + //set weight names + void SetName(int id, std::string name) { + weight_names[id] = name; + } + + + void WriteWeightNames(DatabaseManager &db){ + db.createWeightNamesTable(); + for(auto &p : weight_names){ + db.addWeightDefinition(p.first, p.second); + } + } + /// Accessoir to the generator type const MA5GEN::GeneratorType* GeneratorType() const { return sample_generator_; } diff --git 
a/tools/SampleAnalyzer/Commons/DataFormat/WeightCollection.h b/tools/SampleAnalyzer/Commons/DataFormat/WeightCollection.h index 55a51c63..8b3cc431 100644 --- a/tools/SampleAnalyzer/Commons/DataFormat/WeightCollection.h +++ b/tools/SampleAnalyzer/Commons/DataFormat/WeightCollection.h @@ -66,6 +66,13 @@ class WeightCollection ~WeightCollection() { } + //copy constructor + WeightCollection(const WeightCollection &rhs){ + for(const auto &id_weights :rhs.weights_){ + weights_[id_weights.first]=id_weights.second; + } + } + /// Clear all the content void Reset() { weights_.clear(); } @@ -162,6 +169,47 @@ class WeightCollection } } + //multiply operator + WeightCollection& operator*=(const MAfloat64 multiple){ + for(auto &id_value : weights_){ + id_value.second *= multiple; + } + return *this; + } + + //add operator + WeightCollection& operator+=(const MAfloat64 input){ + for(auto &id_value : weights_){ + id_value.second += input; + } + return *this; + } + + //subtract operator + WeightCollection& operator-=(const MAfloat64 input){ + for(auto &id_value : weights_){ + id_value.second -= input; + } + return *this; + } + + //divide operator + WeightCollection& operator/=(const MAfloat64 input){ + for(auto &id_value : weights_){ + id_value.second /= input; + } + return *this; + } + + + //assignment operator + WeightCollection& operator=(const MAfloat64 input){ + for(auto &id_value : weights_){ + id_value.second = input; + } + return *this; + } + }; diff --git a/tools/SampleAnalyzer/Interfaces/sqlite/DatabaseManager.cpp b/tools/SampleAnalyzer/Interfaces/sqlite/DatabaseManager.cpp new file mode 100644 index 00000000..4ecd5b8b --- /dev/null +++ b/tools/SampleAnalyzer/Interfaces/sqlite/DatabaseManager.cpp @@ -0,0 +1,303 @@ +// Implementation of Database Manager Interface using Pointer to Implementation Design Pattern, SQLiteBase class is only implemented if SQLite package is detected by compile scripts + + + +#include +#include +#include + + +#include 
"SampleAnalyzer/Commons/Base/DatabaseManager.h" + +using namespace std; + +class SQLiteBase{ + private: + // Pointer to SQLite connection + sqlite3 *db; + + // Save any error messages + char *zErrMsg; + + // Save the result of opening the file + int rc; + + // Saved SQL + string sql; + + // Create a callback function + static int callback(void *NotUsed, int argc, char **argv, char **azColName){ + + // int argc: holds the number of results + // (array) azColName: holds each column returned + // (array) argv: holds each value + + for(int i = 0; i < argc; i++) { + + // Show column name, value, and newline + cout << azColName[i] << ": " << argv[i] << endl; + + } + + // Insert a newline + cout << endl; + + // Return successful + return 0; + + } + + + bool checkDBErrors(string msg) { + if( rc ){ + // Show an error message + cout << "DB Error: " << sqlite3_errmsg(db) << " " << msg << endl; + return false; + } else { + cout << " success @ " << msg << endl; + return true; + } + + } + + + public: + + SQLiteBase(string path) { + // Save the result of opening the file + rc = sqlite3_open(path.c_str(), &db); + // enable foreign key constraint + sqlite3_exec(db, "PRAGMA foreign_keys = ON;", 0 ,0 ,0); + bool open_success = checkDBErrors("opening DB"); + if(!open_success) { + sqlite3_close(db); + cout << "open DB failed!" 
<< endl; + } + } + + void createCutflowTables() { + + // Save SQL to create a table + sql = "CREATE TABLE IF NOT EXISTS Cutflow(" \ + "region_name TEXT NOT NULL,"\ + "cut_name TEXT NOT NULL," \ + "primary key (region_name, cut_name));"; + + // Run the SQL + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + checkDBErrors("creating cutflow table"); + + sql = "CREATE TABLE IF NOT EXISTS Weights(" \ + "r_name TEXT NOT NULL," \ + "c_name TEXT NOT NULL," \ + "id INTEGER NOT NULL," \ + "pos_entries INTEGER NOT NULL," \ + "neg_entries INTEGER NOT NULL," \ + "pos_sum DOUBLE NOT NULL," \ + "neg_sum DOUBLE NOT NULL," \ + "pos_squared_sum DOUBLE NOT NULL,"\ + "neg_squared_sum DOUBLE NOT NULL,"\ + "primary key (r_name, c_name, id)" \ + "foreign key (r_name, c_name) references Cutflow(region_name, cut_name) ON DELETE CASCADE);"; + + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + checkDBErrors("creating weights table"); + } + + + void createHistoTables() { + sql = "CREATE TABLE IF NOT EXISTS HistoDescription("\ + "name TEXT NOT NULL,"\ + "num_of_bins INTEGER NOT NULL,"\ + "xmin DOUBLE NOT NULL,"\ + "xmax DOUBLE NOT NULL,"\ + "regions TEXT NOT NULL,"\ + "primary key(name) );"; + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + checkDBErrors("creating HistoDescription table"); + + sql = "CREATE TABLE IF NOT EXISTS Statistics("\ + "name TEXT NOT NULL,"\ + "id TEXT NOT NULL,"\ + "pos_num_events INTEGER NOT NULL,"\ + "neg_num_events INTEGER NOT NULL,"\ + "pos_sum_event_weights_over_events DOUBLE NOT NULL,"\ + "neg_sum_event_weights_over_events DOUBLE NOT NULL,"\ + "pos_entries INTEGER NOT NULL,"\ + "neg_entries INTEGER NOT NULL,"\ + "pos_sum_event_weights_over_entries DOUBLE NOT NULL,"\ + "neg_sum_event_weights_over_entries DOUBLE NOT NULL,"\ + "pos_sum_squared_weights DOUBLE NOT NULL,"\ + "neg_sum_squared_weights DOUBLE NOT NULL,"\ + "pos_value_times_weight DOUBLE NOT NULL,"\ + "neg_value_times_weight DOUBLE NOT NULL,"\ + 
"pos_value_squared_times_weight DOUBLE NOT NULL,"\ + "neg_value_squared_times_weight DOUBLE NOT NULL,"\ + "primary key(name, id) );"; + + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + checkDBErrors("creating Statistics table"); + + sql = "CREATE TABLE IF NOT EXISTS Data("\ + "name TEXT NOT NULL,"\ + "id INTERGER NOT NULL,"\ + "bin TEXT NOT NULL,"\ + "positive DOUBLE NOT NULL,"\ + "negative DOUBLE NOT NULL,"\ + "primary key (name, id, bin) );"; + + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + checkDBErrors("creating Data table"); + + } + + + void createWeightNamesTable() { + sql = "CREATE TABLE IF NOT EXISTS WeightDefinition("\ + "id INTERGER NOT NULL,"\ + "definition TEXT NOT NULL,"\ + "primary key (id) );"; + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + checkDBErrors("creating Weight Names table"); + } + + + void addWeightDefinition(int id, string def) { + sql = "INSERT INTO WeightDefinition VALUES ('" + to_string(id) + "','" + def + "')"; + rc = sqlite3_exec(db, sql.c_str(), callback, 0, &zErrMsg); + } + + + void addHisto(string name, int bins, double xmin, double xmax, string regions) { + sql = "INSERT INTO HistoDescription VALUES ('" + name + "'" + "," + "'" + to_string(bins) + "'" + "," + "'" + to_string(xmin) + "'" + "," + "'" + to_string(xmax) + "'" + "," + "'" + regions + "')"; + rc = sqlite3_exec(db, sql.c_str(), callback, 0 , &zErrMsg); + //checkDBErrors("inserting into Histo: " + name); + } + + + void addStatistic(string name, + int id, + int pos_events, + int neg_events, + double pos_sum_events, + double neg_sum_events, + int pos_entries, + int neg_entries, + double pos_sum_entries, + double neg_sum_entries, + double pos_sum_squared, + double neg_sum_squared, + double pos_val_weight, + double neg_val_weight, + double pos_val2_weight, + double neg_val2_weight) { + + sql = "INSERT INTO Statistics VALUES ('" + name + "','" + to_string(id) + "','" + to_string(pos_events) + "','" + to_string(neg_events) + "','" 
+ to_string(pos_sum_events) + "','" + to_string(neg_sum_events) + "','" + to_string(pos_entries) + "','" + to_string(neg_entries) + "','" + to_string(pos_sum_entries) + "','" + to_string(neg_sum_entries) + "','" + to_string(pos_sum_squared) + "','" + to_string(neg_sum_squared) + "','" + to_string(pos_val_weight) + "','" + to_string(neg_val_weight) + "','" + to_string(pos_val2_weight) + "','" + to_string(neg_val2_weight) + "')"; + + rc = sqlite3_exec(db, sql.c_str(), callback, 0 , &zErrMsg); + //checkDBErrors("inserting into Statistics: " + name); + + } + + + void addData(string name, int id, string bin, double positive, double negative) { + sql = "INSERT INTO Data VALUES ('" + name + "'" + "," + "'" + to_string(id) + "'" + "," + "'" + bin + "'" + "," + "'" + to_string(positive) + "'" + "," + "'" + to_string(negative) + "')"; + rc = sqlite3_exec(db, sql.c_str(), callback, 0 , &zErrMsg); + //checkDBErrors("inserting into Data: " + name + " " + to_string(id)); + + } + + + void addCut(string r_name, string c_name) { + + sql = "INSERT INTO Cutflow VALUES ('" + r_name + "'" + "," + "'" + c_name + "')"; + //cout << sql << endl; + rc = sqlite3_exec(db, sql.c_str(), callback, 0 , &zErrMsg); + //checkDBErrors("inserting cutflow: " + r_name + " " + c_name); + } + + + void addWeight(string r_name, string c_name, int id, int pos, int neg, double pos_sum, double neg_sum, double pos_2sum, double neg_2sum) { + sql = "INSERT INTO Weights VALUES ('" + r_name + "'" + ",'" + c_name + "','" + to_string(id) + "','" + to_string(pos) + "','" + to_string(neg) + "','" + to_string(pos_sum) + "','" + to_string(neg_sum) \ + + "','" + to_string(pos_2sum) + "','" + to_string(neg_2sum) + "')"; + //cout << sql << endl; + rc = sqlite3_exec(db, sql.c_str(), callback, 0 , &zErrMsg); + //checkDBErrors("inserting weight values: " + r_name + " " + c_name + " weight ID: " + to_string(id)); + } + + + void closeDB() { + // Close the SQL connection + sqlite3_close(db); + } + +}; + + + + 
+DatabaseManager::DatabaseManager(string path) { + manager = new SQLiteBase(path); +} + +DatabaseManager::~DatabaseManager() { + delete manager; +} + +void DatabaseManager::createCutflowTables() { + manager->createCutflowTables(); +} + +void DatabaseManager::createHistoTables() { + manager->createHistoTables(); +} + +void DatabaseManager::createWeightNamesTable() { + manager->createWeightNamesTable(); +} + +void DatabaseManager::addWeightDefinition(int id, std::string def) { + manager->addWeightDefinition(id, def); +} + +void DatabaseManager::addHisto(std::string name, int bins, double xmin, double xmax, std::string regions) { + manager->addHisto(name, bins, xmin, xmax, regions); +} + +void DatabaseManager::addStatistic(std::string name, + int id, + int pos_events, + int neg_events, + double pos_sum_events, + double neg_sum_events, + int pos_entries, + int neg_entries, + double pos_sum_entries, + double neg_sum_entries, + double pos_sum_squared, + double neg_sum_squared, + double pos_val_weight, + double neg_val_weight, + double pos_val2_weight, + double neg_val2_weight) { + manager->addStatistic(name,id,pos_events, neg_events, pos_sum_events, neg_sum_events, pos_entries, neg_entries, pos_sum_entries, neg_sum_entries, pos_sum_squared, neg_sum_squared, pos_val_weight, neg_val_weight, pos_val2_weight, neg_val2_weight); + } + +void DatabaseManager::addData(std::string name, int id, std::string bin, double positive, double negative) { + manager->addData(name, id, bin, positive, negative); +} + +void DatabaseManager::addCut(std::string r_name, std::string c_name) { + manager->addCut(r_name, c_name); +} + +void DatabaseManager::addWeight(std::string r_name, std::string c_name, int id, int pos, int neg, double pos_sum, double neg_sum, double pos_2sum, double neg_2sum) { + manager->addWeight(r_name,c_name, id, pos, neg, pos_sum, neg_sum, pos_2sum, neg_2sum); +} + +void DatabaseManager::closeDB() { + manager->closeDB(); +} + + + diff --git 
a/tools/SampleAnalyzer/Process/Analyzer/MergingPlots.cpp b/tools/SampleAnalyzer/Process/Analyzer/MergingPlots.cpp index af133836..e4bf6f13 100644 --- a/tools/SampleAnalyzer/Process/Analyzer/MergingPlots.cpp +++ b/tools/SampleAnalyzer/Process/Analyzer/MergingPlots.cpp @@ -109,7 +109,7 @@ MAbool MergingPlots::Execute(SampleFormat& mySample, const EventFormat& myEvent) WARNING << "Found one event with a zero weight. Skipping..." << endmsg; return false; } - Manager()->InitializeForNewEvent(myEventWeight); + Manager()->InitializeForNewEvent(myEventWeight, myEvent.mc()->multiweights().GetWeights()); // Getting number of extra jets in the event MAuint32 njets = 0; diff --git a/tools/SampleAnalyzer/Process/Core/SampleAnalyzer.cpp b/tools/SampleAnalyzer/Process/Core/SampleAnalyzer.cpp index 19d05390..4a34699e 100644 --- a/tools/SampleAnalyzer/Process/Core/SampleAnalyzer.cpp +++ b/tools/SampleAnalyzer/Process/Core/SampleAnalyzer.cpp @@ -39,6 +39,10 @@ #include "SampleAnalyzer/Commons/Service/ExceptionService.h" #include "SampleAnalyzer/Commons/Service/Physics.h" +//#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" +#include "SampleAnalyzer/Commons/Base/OutputManager.h" + + using namespace MA5; @@ -804,7 +808,7 @@ MAbool SampleAnalyzer::Finalize(std::vector& mySamples, out.Finalize(); } - + // Saving the cut flows for(MAuint32 i=0; i& mySamples, RegionSelection *myRS = myanalysis->Manager()->Regions()[j]; std::string safname = myanalysis->Output() + "/Cutflows/" + CleanName(myRS->GetName()) + ".saf"; - out.Initialize(&cfg_, safname.c_str()); + out.Initialize(&cfg_, safname.c_str()); out.WriteHeader(); myRS->WriteCutflow(out); out.WriteFoot(); @@ -822,6 +826,50 @@ MAbool SampleAnalyzer::Finalize(std::vector& mySamples, } } + + for(MAuint32 i = 0; i < analyzers_.size(); ++i){ + + std::string histo_path = analyzers_[i]->Output() + "/Histograms/histo.db"; + std::string cutflow_path = analyzers_[i]->Output() + "/Cutflows/cutflows.db"; + vector > outputs; + 
outputs.push_back({"sqlite", cutflow_path, histo_path}); + OutputManager output_manager(outputs, analyzers_[i], &mySamples[0]); + output_manager.Execute(); + } + + + + /* + // Create histo SQlite file + for(MAuint32 i = 0; i < analyzers_.size(); ++i){ + std::string path = analyzers_[i]->Output() + "/Histograms/histo.db"; + DatabaseManager dbManager(path); + dbManager.createHistoTables(); + analyzers_[i]->Manager()->GetPlotManager()->WriteSQL(dbManager); + dbManager.closeDB(); + } + + //save multi-weight cutflows to SQL - Kyle Fan + for(int i = 0; i < analyzers_.size(); ++i){ + std::string path = analyzers_[i]->Output() + "/Cutflows/cutflows.db"; + DatabaseManager dbManager(path); + dbManager.createCutflowTables(); + bool addInitial = true; + + AnalyzerBase* myanalysis = analyzers_[i]; + mySamples[0].mc()->WriteWeightNames(dbManager); + //insert region,cut pair to cutflow table and region,cut,weight_id (weight data) to weights table + for(int j = 0; j < myanalysis->Manager()->Regions().size(); ++j){ + RegionSelection *myRS = myanalysis->Manager()->Regions()[j]; + myRS->WriteSQL(dbManager, addInitial); + } + dbManager.closeDB(); + } + + //end of multi-weight cutflow code + */ + + // The user-defined stuff for(MAuint32 i=0; iFinalize(summary,mySamples); @@ -1316,4 +1364,4 @@ void SampleAnalyzer::AddDefaultInvisible() PHYSICS->mcConfig().AddInvisibleId(16); PHYSICS->mcConfig().AddInvisibleId(1000022); PHYSICS->mcConfig().AddInvisibleId(1000039); -} \ No newline at end of file +} diff --git a/tools/SampleAnalyzer/Process/Core/xdr_istream.cpp b/tools/SampleAnalyzer/Process/Core/xdr_istream.cpp index cd3bc81c..3b65f1a6 100644 --- a/tools/SampleAnalyzer/Process/Core/xdr_istream.cpp +++ b/tools/SampleAnalyzer/Process/Core/xdr_istream.cpp @@ -50,8 +50,8 @@ xdr_istream& xdr_istream::operator>>(std::string &s) MAchar* dummy = new MAchar[pad]; sb_->sgetn(dummy,pad); - delete line; - delete dummy; + delete [] line; + delete [] dummy; return *this; } diff --git 
a/tools/SampleAnalyzer/Process/Counter/Counter.h b/tools/SampleAnalyzer/Process/Counter/Counter.h index dde27d9f..a3b213ff 100644 --- a/tools/SampleAnalyzer/Process/Counter/Counter.h +++ b/tools/SampleAnalyzer/Process/Counter/Counter.h @@ -33,6 +33,30 @@ #include #include #include +#include + +struct multiWeightEntry { + std::pair nentries_; + std::pair sumweight_; + std::pair sumweight2_; + multiWeightEntry(double input = 0){ + nentries_.first = input; + nentries_.second = input; + sumweight_.first = input; + sumweight_.second = input; + sumweight2_.first = input; + sumweight2_.second = input; + } + + multiWeightEntry(const multiWeightEntry &rhs){ + nentries_.first = rhs.nentries_.first; + nentries_.second = rhs.nentries_.second; + sumweight_.first = rhs.sumweight_.first; + sumweight_.second = rhs.sumweight_.second; + sumweight2_.first = rhs.sumweight2_.first; + sumweight2_.second = rhs.sumweight2_.second; + } +}; namespace MA5 @@ -49,6 +73,8 @@ class Counter // ------------------------------------------------------------- public : + + /// name of the analysis std::string name_; @@ -64,6 +90,8 @@ class Counter /// first = positive weight ; second = negative weight std::pair sumweight2_; + std::map multiweight_; + // ------------------------------------------------------------- // method members @@ -71,7 +99,7 @@ class Counter public : /// Constructor without argument - Counter(const std::string& name = "unkwown") + Counter(const std::string& name = "unknown") { name_ = name; nentries_ = std::make_pair(0,0); @@ -81,7 +109,7 @@ class Counter /// Destructor ~Counter() - { } + { } /// Reset void Reset() @@ -89,10 +117,57 @@ class Counter nentries_ = std::make_pair(0,0); sumweight_ = std::make_pair(0.,0.); sumweight2_ = std::make_pair(0.,0.); + multiweight_.clear(); + } + + /* + void Debug(int debugCount, int weightprocessed){ + for(auto &p : multiweight_){ + std::ofstream output; + output.open("/Users/kfan/desktop/output.txt", std::fstream::app); + output << "ID : " 
<< p.first << " increment multiweight call number : " << debugCount << " weight Processed :"<< weightprocessed; + output << "\n"; + output << "pos entries : " << p.second->nentries_.first << " -- " << nentries_.first << " neg entries : " << p.second->nentries_.second << " -- " << nentries_.second << "\n"; + output << "pos sum : " << p.second->sumweight_.first << " -- " << sumweight_.first << " neg sum : " << p.second->sumweight_.second << " -- " << sumweight_.second << "\n"; + output << "pos squared : " <sumweight2_.first << " -- " << sumweight2_.first << " neg squared : " << p.second->sumweight2_.second << " -- " << sumweight2_.second << "\n"; + output << "----------------------------------------------------------"; + output << "\n"; + output.close(); + + } + + } + */ + + + + void Increment(const std::map &multiweights){ + // static int incrementDebugCount = 0; + + for(const auto &weight : multiweights){ + + if(weight.second > 0){ + multiweight_[weight.first].nentries_.first++; + multiweight_[weight.first].sumweight_.first+=weight.second; + multiweight_[weight.first].sumweight2_.first+=weight.second*weight.second; + } + else if (weight.second < 0){ + multiweight_[weight.first].nentries_.second++; + multiweight_[weight.first].sumweight_.second+=weight.second; + multiweight_[weight.first].sumweight2_.second+=weight.second*weight.second; + } + } + + + //Debug testing + // Debug(incrementDebugCount, multiweights.begin()->second); + // incrementDebugCount++; + } + /// Increment the counter - void Increment(const MAfloat32& weight=1.) 
+ void Increment(const MAfloat32& weight) { if (weight>0) { @@ -106,8 +181,22 @@ class Counter sumweight_.second+=weight; sumweight2_.second+=weight*weight; } + + } + + void Init(const std::map &weights){ + for(auto w : weights){ + if(multiweight_.find(w.first) == multiweight_.end()) {multiweight_[w.first] = multiWeightEntry(0) ;} + } } + + + + + + + }; } diff --git a/tools/SampleAnalyzer/Process/Counter/CounterManager.cpp b/tools/SampleAnalyzer/Process/Counter/CounterManager.cpp index a780da8c..84100561 100644 --- a/tools/SampleAnalyzer/Process/Counter/CounterManager.cpp +++ b/tools/SampleAnalyzer/Process/Counter/CounterManager.cpp @@ -26,6 +26,8 @@ #include "SampleAnalyzer/Process/Counter/CounterManager.h" + + using namespace MA5; /* @@ -77,13 +79,15 @@ void CounterManager::Write_RootFormat(TFile* output) const /// Write the counters in a TEXT file void CounterManager::Write_TextFormat(SAFWriter& output) const { + + // header *output.GetStream() << "" << std::endl; // name *output.GetStream() << "\"Initial number of events\" #" << std::endl; - // nentries + // nentries output.GetStream()->width(15); *output.GetStream() << std::left << std::scientific << initial_.nentries_.first; *output.GetStream() << " "; @@ -111,12 +115,10 @@ void CounterManager::Write_TextFormat(SAFWriter& output) const *output.GetStream() << "" << std::endl; *output.GetStream() << std::endl; - - // Loop over the counters for (MAuint32 i=0;i" << std::endl; // name @@ -154,4 +156,54 @@ void CounterManager::Write_TextFormat(SAFWriter& output) const *output.GetStream() << "" << std::endl; *output.GetStream() << std::endl; } + + +} + + +//pass in database handler object, flag to check if initial events has been added, no need to repeatedly insert +//initial since each region has an identical copy, pass in region name +void CounterManager::WriteSQL(DatabaseManager &db, bool &AddInitial, std::string region_name){ + if(AddInitial){ + //if add initial is true, insert the initial event cut, and for 
each weight id, insert the values into + //the database, set flag to false afterwards + db.addCut("initial","event"); + for(const auto &p : initial_.multiweight_){ + int id, pos_entries, neg_entries; + double pos_sum, neg_sum, pos_2sum, neg_2sum; + id = p.first; + pos_entries = p.second.nentries_.first; + neg_entries = p.second.nentries_.second; + pos_sum = p.second.sumweight_.first; + neg_sum = p.second.sumweight_.second; + pos_2sum = p.second.sumweight2_.first; + neg_2sum = p.second.sumweight2_.second; + db.addWeight("initial", "event", id, pos_entries, neg_entries, pos_sum, neg_sum, pos_2sum, neg_2sum); + } + AddInitial = false; + } + + //for each cut in the region, add region and cut names to cutflow table + //for each weight id in each cut, insert the values into the database + for(const auto &cut : counters_){ + std::string cut_name = cut.name_; + db.addCut(region_name, cut_name); + // std::cout << "number of multiweight entries in cut : " << region_name << " " << cut_name << " is : " << cut.multiweight_.size() << std::endl; + for(const auto &p : cut.multiweight_){ + int id, pos_entries, neg_entries; + double pos_sum, neg_sum, pos_2sum, neg_2sum; + id = p.first; + pos_entries = p.second.nentries_.first; + neg_entries = p.second.nentries_.second; + pos_sum = p.second.sumweight_.first; + neg_sum = p.second.sumweight_.second; + pos_2sum = p.second.sumweight2_.first; + neg_2sum = p.second.sumweight2_.second; + db.addWeight(region_name, cut_name, id, pos_entries, neg_entries, pos_sum, neg_sum, pos_2sum, neg_2sum); + } + + + } + } + diff --git a/tools/SampleAnalyzer/Process/Counter/CounterManager.h b/tools/SampleAnalyzer/Process/Counter/CounterManager.h index 654abed9..e281b151 100644 --- a/tools/SampleAnalyzer/Process/Counter/CounterManager.h +++ b/tools/SampleAnalyzer/Process/Counter/CounterManager.h @@ -31,10 +31,14 @@ #include #include + // SampleAnalyzer headers #include "SampleAnalyzer/Process/Counter/Counter.h" #include 
"SampleAnalyzer/Process/Writer/SAFWriter.h" +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" + + namespace MA5 { @@ -89,8 +93,14 @@ class CounterManager { return counters_[index];} /// Incrementing the initial number of events - void IncrementNInitial(MAfloat32 weight=1.0) - { initial_.Increment(weight); } + void IncrementNInitial(MAfloat32 weight, const std::map &multiweight) + { + initial_.Increment(weight); + initial_.Increment(multiweight); + for(auto &cut : counters_){ + cut.Init(multiweight); + } + } /// Incrementing the initial number of events Counter& GetInitial() @@ -101,6 +111,9 @@ class CounterManager /// Write the counters in a Text file void Write_TextFormat(SAFWriter& output) const; + //Write to SQL database + void WriteSQL(DatabaseManager &db, bool &AddInitial, std::string region_name); + /// Write the counters in a ROOT file // void Write_RootFormat(TFile* output) const; @@ -108,6 +121,16 @@ class CounterManager void Finalize() { Reset(); } + + /* + void IncrementNInitial(const std::map &multiweight){ + initial_.Increment(multiweight); + } + */ + + + + }; } diff --git a/tools/SampleAnalyzer/Process/Plot/Histo.cpp b/tools/SampleAnalyzer/Process/Plot/Histo.cpp index c02d778e..91c2d89f 100644 --- a/tools/SampleAnalyzer/Process/Plot/Histo.cpp +++ b/tools/SampleAnalyzer/Process/Plot/Histo.cpp @@ -42,6 +42,47 @@ void Histo::Write_TextFormat(std::ostream* output) *output << std::endl; } +//db class passed in from SampleAnalyzer execute function, adds histo, statistics and data for each weight id to +//SQlite3 output file +void Histo::WriteSQL(DatabaseManager &db){ + + //create string of regions associated with the Histo + std::string regionNames = ""; + for(MAuint32 i = 0; i < regions_.size(); ++i){ + if(i != 0) {regionNames += " ";} + regionNames += regions_[i]->GetName(); + } + //add general histo info to the Histo Description table + db.addHisto(name_, nbins_, xmin_, xmax_, regionNames); + //for each histo, weight id pair: add statistics info 
to Statistics table + for(const auto &weight_id : MultiweightHistoData){ + db.addStatistic(name_, + weight_id.first, + multiweight_event_info[weight_id.first].nevents_.first, + multiweight_event_info[weight_id.first].nevents_.second, + multiweight_event_info[weight_id.first].nevents_w_.first, + multiweight_event_info[weight_id.first].nevents_w_.second, + multiweight_event_info[weight_id.first].nentries_.first, + multiweight_event_info[weight_id.first].nentries_.second, + weight_id.second.sum_w_.first, + weight_id.second.sum_w_.second, + weight_id.second.sum_ww_.first, + weight_id.second.sum_ww_.second, + weight_id.second.sum_xw_.first, + weight_id.second.sum_xw_.second, + weight_id.second.sum_xxw_.first, + weight_id.second.sum_xxw_.second); + //for each weight histo,weight id pair: add bucket data to Data table + db.addData(name_, weight_id.first, "underflow", weight_id.second.underflow_.first, weight_id.second.underflow_.second); + for(int i = 0; i < nbins_; ++i){ + db.addData(name_, weight_id.first, std::to_string(i+1), weight_id.second.histo_[i].first, weight_id.second.histo_[i].second); + } + db.addData(name_, weight_id.first, "overflow", weight_id.second.overflow_.first, weight_id.second.overflow_.second); + } + + +} + /// Write the plot in a Text file void Histo::Write_TextFormatBody(std::ostream* output) diff --git a/tools/SampleAnalyzer/Process/Plot/Histo.h b/tools/SampleAnalyzer/Process/Plot/Histo.h index 6945d25b..b65ce583 100644 --- a/tools/SampleAnalyzer/Process/Plot/Histo.h +++ b/tools/SampleAnalyzer/Process/Plot/Histo.h @@ -36,6 +36,47 @@ #include "SampleAnalyzer/Process/RegionSelection/RegionSelection.h" #include "SampleAnalyzer/Commons/Service/ExceptionService.h" +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" + + +struct MultiWeightHisto { + std::vector< std::pair > histo_; + std::pair underflow_; + std::pair overflow_; + + /// Sum of event-weights over entries + std::pair sum_w_; + + /// Sum of squared weights + std::pair sum_ww_; + + 
/// Sum of value * weight + std::pair sum_xw_; + + /// Sum of value * value * weight + std::pair sum_xxw_; + + MultiWeightHisto() {} + + MultiWeightHisto(const MultiWeightHisto &rhs){ + histo_ = rhs.histo_; + underflow_.first = rhs.underflow_.first; + underflow_.second = rhs.underflow_.second; + overflow_.first = rhs.overflow_.first; + overflow_.second = rhs.overflow_.second; + sum_w_.first = rhs.sum_w_.first; + sum_w_.second = rhs.sum_w_.second; + sum_ww_.first = rhs.sum_ww_.first; + sum_ww_.second = rhs.sum_ww_.second; + sum_xw_.first = rhs.sum_xw_.first; + sum_xw_.second = rhs.sum_xw_.second; + sum_xxw_.first = rhs.sum_xxw_.first; + sum_xxw_.second = rhs.sum_xxw_.second; + } + + +}; + namespace MA5 { @@ -74,6 +115,8 @@ class Histo : public PlotBase /// RegionSelections attached to the histo std::vector regions_; + std::map MultiweightHistoData; + // ------------------------------------------------------------- // method members // ------------------------------------------------------------- @@ -138,6 +181,20 @@ class Histo : public PlotBase virtual ~Histo() { } + virtual void InitMultiweights(const std::map &weights){ + for(const auto &id_weight : weights){ + if(multiweight_event_info.find(id_weight.first) == multiweight_event_info.end()){ + multiweight_event_info[id_weight.first] = MultiweightEvents(); + } + if(MultiweightHistoData.find(id_weight.first) == MultiweightHistoData.end()){ + MultiweightHistoData[id_weight.first] = MultiWeightHisto(); + MultiweightHistoData[id_weight.first].histo_.resize(nbins_); + } + + } + } + + /// Setting the linked regions void SetSelectionRegions(std::vector myregions) { regions_.insert(regions_.end(), myregions.begin(), myregions.end()); } @@ -204,9 +261,65 @@ class Histo : public PlotBase } } + void Fill(MAfloat64 value, std::map &weights){ + // Safety : nan or isinf + try + { + if (std::isnan(value)) throw EXCEPTION_WARNING("Skipping a NaN (Not a Number) value in an histogram.","",0); + if (std::isinf(value)) throw 
EXCEPTION_WARNING("Skipping a Infinity value in an histogram.","",0); + } + catch (const std::exception& e) + { + MANAGE_EXCEPTION(e); + } + + for(const auto &id_weight : weights){ + + /* + if(MultiweightHistoData.find(id_weight.first) == MultiweightHistoData.end()){ + MultiweightHistoData[id_weight.first] = new MultiWeightHisto(); + MultiweightHistoData[id_weight.first]->histo_.resize(nbins_); + } + */ + //Positive weight + if(id_weight.second >= 0){ + multiweight_event_info[id_weight.first].nentries_.first++; + MultiweightHistoData[id_weight.first].sum_w_.first +=id_weight.second; + MultiweightHistoData[id_weight.first].sum_ww_.first +=id_weight.second * id_weight.second; + MultiweightHistoData[id_weight.first].sum_xw_.first +=value * id_weight.second; + MultiweightHistoData[id_weight.first].sum_xxw_.first +=value*value*id_weight.second; + if (value < xmin_) MultiweightHistoData[id_weight.first].underflow_.first+=id_weight.second; + else if (value >= xmax_) MultiweightHistoData[id_weight.first].overflow_.first+=id_weight.second; + else + { + MultiweightHistoData[id_weight.first].histo_[std::floor((value-xmin_)/step_)].first+=id_weight.second; + } + } else { + int absWeight = std::abs(id_weight.second); + multiweight_event_info[id_weight.first].nentries_.second++; + MultiweightHistoData[id_weight.first].sum_w_.second += absWeight; + MultiweightHistoData[id_weight.first].sum_ww_.second += absWeight * absWeight; + MultiweightHistoData[id_weight.first].sum_xw_.second +=value * absWeight; + MultiweightHistoData[id_weight.first].sum_xxw_.second +=value*value*absWeight; + if (value < xmin_) MultiweightHistoData[id_weight.first].underflow_.second+=absWeight; + else if (value >= xmax_) MultiweightHistoData[id_weight.first].overflow_.second+=absWeight; + else + { + MultiweightHistoData[id_weight.first].histo_[std::floor((value-xmin_)/step_)].second+=absWeight; + } + + } + } + + + + } + /// Write the plot in a ROOT file virtual void Write_TextFormat(std::ostream* output); + 
virtual void WriteSQL(DatabaseManager &db); + /// Write the plot in a ROOT file // virtual void Write_RootFormat(std::pair& histos); diff --git a/tools/SampleAnalyzer/Process/Plot/HistoFrequency.h b/tools/SampleAnalyzer/Process/Plot/HistoFrequency.h index 6816d2ef..7835882c 100644 --- a/tools/SampleAnalyzer/Process/Plot/HistoFrequency.h +++ b/tools/SampleAnalyzer/Process/Plot/HistoFrequency.h @@ -33,6 +33,16 @@ // SampleAnalyzer headers #include "SampleAnalyzer/Process/Plot/PlotBase.h" +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" + +struct MultiWeightHistoFrequency { + + /// collection of observables for each weight id entry + std::map > stack_; + /// Sum of even-weights over entries for each id entry + std::pair sum_w_; + +}; namespace MA5 @@ -52,6 +62,7 @@ class HistoFrequency : public PlotBase /// Sum of event-weights over entries std::pair sum_w_; + std::map MultiWeightHistoFrequencyData; /// RegionSelections attached to the histo std::vector regions_; @@ -125,6 +136,39 @@ class HistoFrequency : public PlotBase } } + void Fill(const MAint32 &obs, std::map &weights){ + + for(auto &id_weight : weights) { + iterator it = MultiWeightHistoFrequencyData[id_weight.first].stack_.find(obs); + if(it == MultiWeightHistoFrequencyData[id_weight.first].stack_.end()){ + MultiWeightHistoFrequencyData[id_weight.first].stack_[obs] = std::make_pair(0.,0.); + } + else { + if(id_weight.second >= 0){ + multiweight_event_info[id_weight.first].nentries_.first++; + MultiWeightHistoFrequencyData[id_weight.first].sum_w_.first += id_weight.second; + MultiWeightHistoFrequencyData[id_weight.first].stack_[obs].first += id_weight.second; + + } else { + int absWeight = std::abs(id_weight.second); + multiweight_event_info[id_weight.first].nentries_.second++; + MultiWeightHistoFrequencyData[id_weight.first].sum_w_.second += absWeight; + MultiWeightHistoFrequencyData[id_weight.first].stack_[obs].second += absWeight; + } + } + } + + } + + + virtual void WriteSQL(DatabaseManager &db) { 
+ + + + + + }; + /// Write the plot in a ROOT file virtual void Write_TextFormat(std::ostream* output) { diff --git a/tools/SampleAnalyzer/Process/Plot/HistoLogX.h b/tools/SampleAnalyzer/Process/Plot/HistoLogX.h index b9f170f3..2ef7c8c4 100644 --- a/tools/SampleAnalyzer/Process/Plot/HistoLogX.h +++ b/tools/SampleAnalyzer/Process/Plot/HistoLogX.h @@ -49,6 +49,7 @@ class HistoLogX : public Histo MAfloat64 log_xmin_; MAfloat64 log_xmax_; + // ------------------------------------------------------------- // method members // ------------------------------------------------------------- @@ -165,6 +166,55 @@ class HistoLogX : public Histo } } + void Fill(MAfloat64 value, std::map &weights){ + + try + { + if (std::isnan(value)) throw EXCEPTION_WARNING("Skipping a NaN (Not a Number) value in an histogram.","",0); + if (std::isinf(value)) throw EXCEPTION_WARNING("Skipping a Infinity value in an histogram.","",0); + } + catch (const std::exception& e) + { + MANAGE_EXCEPTION(e); + } + + for(auto &id_weight : weights){ + if(id_weight.second >= 0) { + + multiweight_event_info[id_weight.first].nentries_.first++; + MultiweightHistoData[id_weight.first].sum_w_.first +=id_weight.second; + MultiweightHistoData[id_weight.first].sum_ww_.first +=id_weight.second * id_weight.second; + MultiweightHistoData[id_weight.first].sum_xw_.first +=value * id_weight.second; + + //in log bountries map, access by weight id which gives a pair, first element is the min and second is the max + if( value < xmin_) { + MultiweightHistoData[id_weight.first].underflow_.first +=id_weight.second; + } else if (value >= xmax_) { + MultiweightHistoData[id_weight.first].overflow_.first +=id_weight.second; + } else { + MultiweightHistoData[id_weight.first].histo_[std::floor((std::log10(value)-log_xmin_)/step_)].first+=id_weight.second; + } + + } else { + multiweight_event_info[id_weight.first].nentries_.second++; + int absWeight = std::abs(id_weight.second); + MultiweightHistoData[id_weight.first].sum_w_.second 
+=absWeight; + MultiweightHistoData[id_weight.first].sum_ww_.second +=absWeight * absWeight; + MultiweightHistoData[id_weight.first].sum_xw_.second +=value * absWeight; + if(value < xmin_) { + MultiweightHistoData[id_weight.first].underflow_.second +=absWeight; + } else if (value >= xmax_) MultiweightHistoData[id_weight.first].overflow_.second+=absWeight; + else + { + MultiweightHistoData[id_weight.first].histo_[std::floor((std::log10(value)-log_xmin_)/step_)].second+=absWeight; + } + + + } + } + + } + /// Write the plot in a Text file virtual void Write_TextFormat(std::ostream* output); diff --git a/tools/SampleAnalyzer/Process/Plot/PlotBase.h b/tools/SampleAnalyzer/Process/Plot/PlotBase.h index cd883247..89dfa795 100644 --- a/tools/SampleAnalyzer/Process/Plot/PlotBase.h +++ b/tools/SampleAnalyzer/Process/Plot/PlotBase.h @@ -35,6 +35,44 @@ // SampleAnalyzer headers #include "SampleAnalyzer/Commons/Service/LogService.h" +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" + + +struct MultiweightEvents { + /// Number of events + std::pair nevents_; + + /// Number of entries + std::pair nentries_; + + /// Sum of event-weight over events + std::pair nevents_w_; + + MAbool fresh_event_; + + MultiweightEvents() { + fresh_event_ = true; + nevents_.first = 0; + nevents_.second = 0; + nentries_.first = 0; + nentries_.second = 0; + nevents_w_.first = 0.; + nevents_w_.second = 0.; + + } + + MultiweightEvents(const MultiweightEvents &rhs){ + nevents_.first = rhs.nevents_.first; + nevents_.second = rhs.nevents_.second; + nentries_.first = rhs.nentries_.first; + nentries_.second = rhs.nentries_.second; + nevents_w_.first = rhs.nevents_w_.first; + nevents_w_.second = rhs.nevents_w_.second; + fresh_event_ = rhs.fresh_event_; + } + +}; + namespace MA5 { @@ -62,6 +100,9 @@ class PlotBase /// Flag telling whether a given histo has already been modified for an event MAbool fresh_event_; + //map containing event meta data for each weight id + std::map multiweight_event_info; + // 
------------------------------------------------------------- // method members @@ -76,6 +117,7 @@ class PlotBase nentries_ = std::make_pair(0,0); nevents_w_ = std::make_pair(0,0); fresh_event_ = true; + } /// Constructor with argument @@ -86,21 +128,49 @@ class PlotBase nevents_w_ = std::make_pair(0,0); nentries_ = std::make_pair(0,0); fresh_event_ = true; + } /// Destructor virtual ~PlotBase() - { } + { + } /// Accesor for fresh_event MAbool FreshEvent() { return fresh_event_;} + /// Accessor for multiweight fresh_event + MAbool MultiweightFreshEvent(int index) { + return multiweight_event_info[index].fresh_event_; + } + /// Modifier for fresh_event - void SetFreshEvent(MAbool tag) { fresh_event_ = tag;} + void SetFreshEvent(MAbool tag) { + fresh_event_ = tag; + } + + /// Modifier for Multiweight fresh_event + void SetMultiweightFreshEvent(MAbool tag) { + for(auto &weight_id : multiweight_event_info){ + weight_id.second.fresh_event_ = tag; + } + } + + //initialize histograms multiweights (without initialization, 0 entries will not show up in histogram) + virtual void InitMultiweights(const std::map &weights){ + for(const auto &id_weight : weights){ + if(multiweight_event_info.find(id_weight.first) == multiweight_event_info.end()){ + multiweight_event_info[id_weight.first] = MultiweightEvents(); + } + + } + } /// Write the plot in a ROOT file virtual void Write_TextFormat(std::ostream* output) = 0; + virtual void WriteSQL(DatabaseManager &db) {}; + /// Increment number of events void IncrementNEvents(MAfloat64 weight=1.0) { @@ -118,10 +188,37 @@ class PlotBase SetFreshEvent(false); } + void IncrementNEvents(std::map &weights){ + for(const auto &id_weight : weights){ + /* + if(multiweight_event_info.find(id_weight.first) == multiweight_event_info.end()){ + multiweight_event_info[id_weight.first] = new MultiweightEvents(); + multiweight_event_info[id_weight.first]->fresh_event_ = true; + } + */ + + if(MultiweightFreshEvent(id_weight.first)){ + if(id_weight.second >= 
0){ + multiweight_event_info[id_weight.first].nevents_.first++; + multiweight_event_info[id_weight.first].nevents_w_.first+=id_weight.second; + } else { + multiweight_event_info[id_weight.first].nevents_.second++; + multiweight_event_info[id_weight.first].nevents_w_.second+=std::abs(id_weight.second); + } + + } + multiweight_event_info[id_weight.first].fresh_event_ = false; + } + } + /// Return Number of events const std::pair& GetNEvents() { return nevents_; } + const std::pair& GetNEvents(int index){ + return multiweight_event_info[index].nevents_; + } + // Return the name std::string GetName() { return name_; } diff --git a/tools/SampleAnalyzer/Process/Plot/PlotManager.h b/tools/SampleAnalyzer/Process/Plot/PlotManager.h index 737c7030..76aa8442 100644 --- a/tools/SampleAnalyzer/Process/Plot/PlotManager.h +++ b/tools/SampleAnalyzer/Process/Plot/PlotManager.h @@ -39,6 +39,8 @@ #include "SampleAnalyzer/Process/Writer/SAFWriter.h" #include "SampleAnalyzer/Process/RegionSelection/RegionSelection.h" +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" + namespace MA5 { @@ -139,6 +141,13 @@ class PlotManager /// Write the counters in a Text file void Write_TextFormat(SAFWriter& output); + void WriteSQL(DatabaseManager &db){ + for(auto &pl : plots_){ + pl->WriteSQL(db); + } + } + + /// Finalizing void Finalize() { Reset(); } diff --git a/tools/SampleAnalyzer/Process/Reader/HEPMCReader.cpp b/tools/SampleAnalyzer/Process/Reader/HEPMCReader.cpp index 9ef07ab5..6e854d77 100644 --- a/tools/SampleAnalyzer/Process/Reader/HEPMCReader.cpp +++ b/tools/SampleAnalyzer/Process/Reader/HEPMCReader.cpp @@ -228,7 +228,7 @@ MAbool HEPMCReader::FinalizeEvent(SampleFormat& mySample, EventFormat& myEvent) //------------------------------------------------------------------------------ // FillWeightNames //------------------------------------------------------------------------------ -MAbool HEPMCReader::FillWeightNames(const std::string& line) +MAbool HEPMCReader::FillWeightNames(const 
std::string& line, SampleFormat& mySample) { // Splitting line in words std::stringstream str; @@ -254,6 +254,8 @@ MAbool HEPMCReader::FillWeightNames(const std::string& line) if (tmp[0]=='"' && tmp[tmp.size()-1]=='"') tmp=tmp.substr(1,tmp.size()-2); weight_names[i]=tmp; + mySample.mc()->SetName(i+1, tmp); + } // Ok @@ -298,7 +300,7 @@ MAbool HEPMCReader::FillEvent(const std::string& line, if(firstWord=="E") FillEventInformations(line, myEvent); // Weight names - else if (firstWord=="N") FillWeightNames(line); + else if (firstWord=="N") FillWeightNames(line, mySample); // Event units else if (firstWord=="U") FillUnits(line); diff --git a/tools/SampleAnalyzer/Process/Reader/HEPMCReader.h b/tools/SampleAnalyzer/Process/Reader/HEPMCReader.h index fc2b42e3..82c65403 100644 --- a/tools/SampleAnalyzer/Process/Reader/HEPMCReader.h +++ b/tools/SampleAnalyzer/Process/Reader/HEPMCReader.h @@ -111,7 +111,7 @@ class HEPMCReader : public ReaderTextBase void FillEventPDFInfo(const std::string& line, SampleFormat& mySample, EventFormat& myEvent); void FillEventParticleLine(const std::string& line, EventFormat& myEvent); void FillEventVertexLine(const std::string& line, EventFormat& myEvent); - MAbool FillWeightNames(const std::string& line); + MAbool FillWeightNames(const std::string& line, SampleFormat& mySample); MAbool FillHeavyIons(const std::string& line); }; diff --git a/tools/SampleAnalyzer/Process/RegionSelection/RegionSelection.h b/tools/SampleAnalyzer/Process/RegionSelection/RegionSelection.h index 2f491436..9841fe79 100644 --- a/tools/SampleAnalyzer/Process/RegionSelection/RegionSelection.h +++ b/tools/SampleAnalyzer/Process/RegionSelection/RegionSelection.h @@ -29,11 +29,13 @@ // STL headers #include #include +#include // SampleAnalyzer headers #include "SampleAnalyzer/Process/Counter/CounterManager.h" #include "SampleAnalyzer/Process/Writer/SAFWriter.h" +#include "SampleAnalyzer/Commons/Base/DatabaseManager.h" namespace MA5 { @@ -48,6 +50,8 @@ class RegionSelection 
MAbool surviving_; MAuint32 NumberOfCutsAppliedSoFar_; MAfloat64 weight_; + std::map multiWeight_; + CounterManager cutflow_; @@ -64,10 +68,12 @@ class RegionSelection /// Destructor ~RegionSelection() { }; + /// Get methods std::string GetName() { return name_; } + MAbool IsSurviving() { return surviving_; } @@ -79,7 +85,12 @@ class RegionSelection /// Printing the cutflow void WriteCutflow(SAFWriter& output) - { cutflow_.Write_TextFormat(output); } + { cutflow_.Write_TextFormat(output);} + + //write to SQL database + void WriteSQL(DatabaseManager &db, bool &AddInitial){ + cutflow_.WriteSQL(db, AddInitial, name_); + } /// Set methods void SetName(std::string name) @@ -97,11 +108,14 @@ class RegionSelection void SetNumberOfCutsAppliedSoFar(MAuint32 NumberOfCutsAppliedSoFar) { NumberOfCutsAppliedSoFar_ = NumberOfCutsAppliedSoFar; } + // Increment CutFlow (when this region passes a cut) - void IncrementCutFlow(MAfloat64 weight) + void IncrementCutFlow(MAfloat64 weight, const std::map &multiweight) { cutflow_[NumberOfCutsAppliedSoFar_].Increment(weight); + cutflow_[NumberOfCutsAppliedSoFar_].Increment(multiweight); NumberOfCutsAppliedSoFar_++; + } // Add a cut to the CutFlow @@ -109,14 +123,24 @@ class RegionSelection { cutflow_.InitCut(CutName); } /// Getting ready for a new event - void InitializeForNewEvent(const MAfloat64 &weight) + void InitializeForNewEvent(const MAfloat64 &weight,const std::map &multiweight) { SetSurvivingTest(true); SetNumberOfCutsAppliedSoFar(0); - cutflow_.IncrementNInitial(weight); + cutflow_.IncrementNInitial(weight, multiweight); + multiWeight_=multiweight; weight_=weight; } + //Multiweight Integration implementation below + void SetWeight(const std::map &multiweight) { + multiWeight_ = multiweight; + } + + std::map GetWeights() {return multiWeight_;} + + + }; } diff --git a/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.cpp b/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.cpp index 
b666a4af..760ed885 100644 --- a/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.cpp +++ b/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.cpp @@ -24,6 +24,7 @@ // STL headers #include +#include // SampleAnalyzer headers #include "SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.h" @@ -63,13 +64,18 @@ MAbool RegionSelectionManager::ApplyCut(MAbool condition, std::string const &cut std::vector RegionsForThisCut = mycut->Regions(); for (MAuint32 i=0; iIsSurviving() ) { continue; } /// Check the current cut: - if(condition) { ThisRegion->IncrementCutFlow(ThisRegion->GetWeight()); } + if(condition) { + ThisRegion->IncrementCutFlow(ThisRegion->GetWeight(), ThisRegion->GetWeights()); + //multiweight + //ThisRegion->IncrementCutFlow(ThisRegion->GetWeights()); + } else { ThisRegion->SetSurvivingTest(false); @@ -107,8 +113,12 @@ void RegionSelectionManager::FillHisto(std::string const&histname, MAfloat64 val } catch (const std::exception& e) { MANAGE_EXCEPTION(e); } // Filling the histo - if (myhistof->FreshEvent()) myhistof->IncrementNEvents(weight_); + if (myhistof->FreshEvent()) { + myhistof->IncrementNEvents(weight_); + myhistof->IncrementNEvents(multiweight_); + } myhistof->Fill(val,weight_); + myhistof->Fill(val,multiweight_); } // LogX histo else if(dynamic_cast(plotmanager_.GetHistos()[i])!=0) @@ -122,8 +132,12 @@ void RegionSelectionManager::FillHisto(std::string const&histname, MAfloat64 val } catch (const std::exception& e) { MANAGE_EXCEPTION(e); } // Filling the histo - if (myhistoX->FreshEvent()) myhistoX->IncrementNEvents(weight_); + if (myhistoX->FreshEvent()) { + myhistoX->IncrementNEvents(weight_); + myhistoX->IncrementNEvents(multiweight_); + } myhistoX->Fill(val,weight_); + myhistoX->Fill(val,multiweight_); } // Normal histo else if(dynamic_cast(plotmanager_.GetHistos()[i])!=0) @@ -137,8 +151,14 @@ void RegionSelectionManager::FillHisto(std::string const&histname, MAfloat64 val } catch (const 
std::exception& e) { MANAGE_EXCEPTION(e); } // Filling the histo - if (myhisto->FreshEvent()) myhisto->IncrementNEvents(weight_); + if (myhisto->FreshEvent()) { + myhisto->IncrementNEvents(weight_); + myhisto->IncrementNEvents(multiweight_); + } + myhisto->Fill(val,weight_); + //myhisto->IncrementNEvents(multiweight_); + myhisto->Fill(val,multiweight_); } break; } diff --git a/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.h b/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.h index 2e250199..4cc74c98 100644 --- a/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.h +++ b/tools/SampleAnalyzer/Process/RegionSelection/RegionSelectionManager.h @@ -30,6 +30,10 @@ #include #include #include +#include +#include + + // SampleAnalyzer headers #include "SampleAnalyzer/Process/Counter/MultiRegionCounterManager.h" @@ -38,7 +42,9 @@ #include "SampleAnalyzer/Commons/Service/LogService.h" #include "SampleAnalyzer/Process/Writer/SAFWriter.h" #include "SampleAnalyzer/Commons/Service/ExceptionService.h" +#include "SampleAnalyzer/Commons/DataFormat/WeightCollection.h" +using namespace std; namespace MA5 { @@ -65,6 +71,11 @@ class RegionSelectionManager /// Weight associated with the processed event MAfloat64 weight_; + + //multiweight + std::map multiweight_; + + // ------------------------------------------------------------- // method members // ------------------------------------------------------------- @@ -73,7 +84,11 @@ class RegionSelectionManager RegionSelectionManager() {}; /// Destructor - ~RegionSelectionManager() { }; + ~RegionSelectionManager() { + for(auto ®ion_pointer : regions_){ + delete region_pointer; + } + }; /// Reset void Reset() @@ -101,6 +116,11 @@ class RegionSelectionManager MAfloat64 GetCurrentEventWeight() { return weight_; } + std::map GetCurrentEventMultiWeight(){ + return multiweight_; + } + + /// Set method void SetCurrentEventWeight(MAfloat64 weight) { @@ -120,6 +140,26 @@ class 
RegionSelectionManager } } + + //set methods for multiweight + void SetCurrentEventWeight(const WeightCollection &multiweight){ + multiweight_ = multiweight.GetWeights(); + for(auto ®ion : regions_){ + region->SetWeight(multiweight_); + } + } + + void SetRegionWeight(std::string name, std::map &multiweight){ + for(auto ®ion : regions_){ + if(region->GetName() == name) { + region->SetWeight(multiweight); + break; + } + } + } + + + /// Adding a RegionSelection to the manager void AddRegionSelection(const std::string& name) { @@ -135,16 +175,43 @@ class RegionSelectionManager } /// Getting ready for a new event - void InitializeForNewEvent(MAfloat64 EventWeight) + void InitializeForNewEvent(MAfloat64 EventWeight, const std::map &weights) { weight_ = EventWeight; + multiweight_ = weights; NumberOfSurvivingRegions_ = regions_.size(); for (MAuint32 i=0; iInitializeForNewEvent(EventWeight); + { + regions_[i]->InitializeForNewEvent(EventWeight, weights); + + + } for (MAuint32 i=0; i < plotmanager_.GetNplots(); i++) - plotmanager_.GetHistos()[i]->SetFreshEvent(true); + { + plotmanager_.GetHistos()[i]->SetFreshEvent(true); + plotmanager_.GetHistos()[i]->SetMultiweightFreshEvent(true); + plotmanager_.GetHistos()[i]->InitMultiweights(weights); + } } + /// Initialize for multiweight + + + /* + void InitializeForNewEvent(const std::map &weights){ + multiweight_ = weights; + //NumberOfSurvivingRegions_ = regions_.size(); + for (MAuint32 i=0; iInitializeForNewEvent(weights); + } + for(MAuint32 i = 0; i < plotmanager_.GetNplots(); ++i){ + plotmanager_.GetHistos()[i]->SetMultiweightFreshEvent(true); + } + } + */ + + /// This method associates all regions with a cut void AddCut(const std::string&name) { @@ -474,10 +541,16 @@ class RegionSelectionManager return false; } + + + /// Dumping the content of the counters void HeadSR(std::ostream &, const std::string&); void DumpSR(std::ostream &); + + + }; }