Commit 2fdd805

update io
1 parent 567b643 commit 2fdd805

File tree: 1 file changed (+22 −11 lines changed)


likelihood_combiner/io.py

Lines changed: 22 additions & 11 deletions
@@ -325,8 +325,11 @@ def gLikeLimits_to_lklcomLimits(input_dir,
 
     # Getting the txt files of the input directory.
     files = np.array([x for x in os.listdir(input_dir) if x.endswith(".txt")])
+    channels = np.unique([file.split("_")[0] for file in files])
     # Looping over the files and store the likelihood or ts tables into the lklcom hdf5 file.
     svUL = {}
+    for channel in channels:
+        svUL[channel] = {}
     for file in files:
         # Parsing the file name.
         file_info = file.replace('.txt','').split("_")
@@ -341,14 +344,17 @@
         # Going through the table in the txt file and storing the entries in a 2D array.
         table = np.array([[i for i in line.split()] for line in txt_file], dtype=np.float32)
 
-        # Dumping the upper
+        # Dumping the upper limits
 
-        col_name = "data"
         if simulation != -1:
             col_name = "simu_{}".format(simulation)
-        svUL[col_name] = table[1]
+        else:
+            col_name = "data"
+        svUL[file_info[0]]['masses'] = table[0]
+        svUL[file_info[0]][col_name] = table[1]
 
-        pd.DataFrame(data=svUL).to_hdf(output_file, key='{}/{}'.format(file_info[0], file_info[1]), mode="a")
+    for channel in channels:
+        pd.DataFrame(data=svUL[channel]).to_hdf(output_file, key='{}/{}'.format(channel, file_info[1]), mode="a")
 
 def _gLikeLimits_to_lklcomLimits():
     """
@@ -384,9 +390,13 @@ def merge_to_lklcom(input_dir,
 
     # Getting the h5 files of the input directory.
     files = np.array([x for x in os.listdir(input_dir) if x.endswith(".h5") or x.endswith(".hdf5")])
-
+    channels = np.unique([file.split("_")[0] for file in files])
+
     j_nuisance = False
     svUL, svUL_Jnuisance = {}, {}
+    for channel in channels:
+        svUL[channel] = {}
+        svUL_Jnuisance[channel] = {}
     for file in files:
         # Parsing the file name.
         file_info = file.replace('.hdf5','').replace('.h5','').split("_")
@@ -395,17 +405,18 @@
         data = pd.HDFStore("{}/{}".format(input_dir, file), 'r')
 
         if '/masses' in data.keys():
-            svUL['masses'] = svUL_Jnuisance['masses'] = data['masses'][0]
+            svUL[file_info[0]]['masses'] = svUL_Jnuisance[file_info[0]]['masses'] = data['masses'][0]
         if '/sigmavULs' in data.keys():
-            svUL[file_info[1]] = data['sigmavULs'][0]
+            svUL[file_info[0]][file_info[1]] = data['sigmavULs'][0]
         if '/sigmavULs_Jnuisance' in data.keys():
             j_nuisance = True
-            svUL_Jnuisance[file_info[1]] = data['sigmavULs_Jnuisance'][0]
+            svUL_Jnuisance[file_info[0]][file_info[1]] = data['sigmavULs_Jnuisance'][0]
 
     # Write the panda DataFrames into the hdf5 file
-    pd.DataFrame(data=svUL).to_hdf(output_file, key='{}/sigmavULs'.format(file_info[0]), mode='a')
-    if j_nuisance:
-        pd.DataFrame(data=svUL_Jnuisance).to_hdf(output_file, key='{}/sigmavULs_Jnuisance'.format(file_info[0]), mode='a')
+    for channel in channels:
+        pd.DataFrame(data=svUL[channel]).to_hdf(output_file, key='{}/sigmavULs'.format(channel), mode='a')
+        if j_nuisance:
+            pd.DataFrame(data=svUL_Jnuisance[channel]).to_hdf(output_file, key='{}/sigmavULs_Jnuisance'.format(channel), mode='a')
 
 def _merge_to_lklcom():
     """
