diff --git a/src/HHbbVV/bash/run_local.sh b/src/HHbbVV/bash/run_local.sh
index de5304c5..a012cddf 100755
--- a/src/HHbbVV/bash/run_local.sh
+++ b/src/HHbbVV/bash/run_local.sh
@@ -9,7 +9,7 @@
 # done


-year=2018
+year=2016APV
 processor=skimmer
 extraargs="--maxchunks 20000"
 # extraargs="--no-inference"
@@ -17,11 +17,11 @@ extraargs="--maxchunks 20000"
 OUTPUTDIR="tmp/test_outputs/$year"
 mkdir -p $OUTPUTDIR

-python -W ignore src/run.py --processor $processor --year $year --samples HH --subsamples GluGluToHHTobbVV_node_cHHH1 --save-systematics --starti 0 --endi 1 $extraargs
-label="GluGluToHHTobbVV_node_cHHH1"
-mkdir -p $OUTPUTDIR/$label/parquet $OUTPUTDIR/$label/pickles
-mv "0-1.parquet" $OUTPUTDIR/$label/parquet/
-mv "outfiles/0-1.pkl" $OUTPUTDIR/$label/pickles/
+# python -W ignore src/run.py --processor $processor --year $year --samples HH --subsamples GluGluToHHTobbVV_node_cHHH1 --save-systematics --starti 0 --endi 1 $extraargs
+# label="GluGluToHHTobbVV_node_cHHH1"
+# mkdir -p $OUTPUTDIR/$label/parquet $OUTPUTDIR/$label/pickles
+# mv "0-1.parquet" $OUTPUTDIR/$label/parquet/
+# mv "outfiles/0-1.pkl" $OUTPUTDIR/$label/pickles/

 # python -W ignore src/run.py --processor $processor --year $year --samples XHY --subsamples NMSSM_XToYHTo2W2BTo4Q2B_MX-3000_MY-250 --save-systematics --starti 0 --endi 1 $extraargs
 # label="NMSSM_XToYHTo2W2BTo4Q2B_MX-3000_MY-250"
@@ -41,7 +41,7 @@ mv "outfiles/0-1.pkl" $OUTPUTDIR/$label/pickles/
 # mv "0-1.parquet" $OUTPUTDIR/$label/parquet/
 # mv "outfiles/0-1.pkl" $OUTPUTDIR/$label/pickles/

-python -W ignore src/run.py --processor $processor --year $year --samples "JetHT$year" --subsamples "JetHT_Run${year}F" --save-systematics --starti 0 --endi 1 $extraargs
+python -W ignore src/run.py --processor $processor --year $year --samples "JetHT2016" --subsamples "JetHT_Run2016D_HIPM" --save-systematics --starti 0 --endi 1 $extraargs
 label="JetHT_Run${year}D"
 mkdir -p $OUTPUTDIR/$label/parquet $OUTPUTDIR/$label/pickles
 mv "0-1.parquet" $OUTPUTDIR/$label/parquet/
diff --git a/src/HHbbVV/processors/bbVVSkimmer.py b/src/HHbbVV/processors/bbVVSkimmer.py
index 8cdea46a..b0e8e1da 100644
--- a/src/HHbbVV/processors/bbVVSkimmer.py
+++ b/src/HHbbVV/processors/bbVVSkimmer.py
@@ -504,7 +504,7 @@ def process(self, events: ak.Array):

         metfilters = np.ones(len(events), dtype="bool")
         metfilterkey = "data" if isData else "mc"
-        for mf in self.metfilters[year][metfilterkey]:
+        for mf in self.metfilters[year[:4]][metfilterkey]:
             if mf in events.Flag.fields:
                 metfilters = metfilters & events.Flag[mf]

diff --git a/src/condor/check_jobs.py b/src/condor/check_jobs.py
index 6765ca3a..74f6a0e4 100644
--- a/src/condor/check_jobs.py
+++ b/src/condor/check_jobs.py
@@ -33,32 +33,36 @@
 trigger_processor = args.processor.startswith("trigger")

 eosdir = f"/eos/uscms/store/user/{args.user}/bbVV/{args.processor}/{args.tag}/{args.year}/"
+user_condor_dir = f"/uscms/home/{args.user}/nobackup/HHbbVV/condor/"

 samples = listdir(eosdir)

 jdls = [
     jdl
-    for jdl in listdir(
-        f"/uscms/home/{args.user}/nobackup/HHbbVV/condor/{args.processor}/{args.tag}/"
-    )
+    for jdl in listdir(f"{user_condor_dir}/{args.processor}/{args.tag}/")
     if jdl.endswith(".jdl")
 ]

-jdl_dict = {
-    sample: np.sort(
+# get the highest numbered .jdl file to know how many output files there should be
+jdl_dict = {}
+for sample in samples.copy():
+    sorted_jdls = np.sort(
         [
             int(jdl[:-4].split("_")[-1])
             for jdl in jdls
             if jdl.split("_")[0] == args.year and "_".join(jdl.split("_")[1:-1]) == sample
         ]
-    )[-1]
-    + 1
-    for sample in samples
-}
+    )
+
+    if len(sorted_jdls):
+        jdl_dict[sample] = sorted_jdls[-1] + 1
+    else:
+        # if for some reason a folder exists in EOS but no .jdl file
+        samples.remove(sample)

 running_jobs = []
 if args.check_running:
-    os.system("condor_q | awk '{print $9}' > running_jobs.txt")
+    os.system(f"condor_q {args.user}" "| awk '{print $9}' > running_jobs.txt")

     with Path("running_jobs.txt").open() as f:
         lines = f.readlines()
@@ -81,8 +85,10 @@
                     print(f"Job #{i} for sample {sample} is running.")
                     continue

-                jdl_file = f"condor/{args.processor}/{args.tag}/{args.year}_{sample}_{i}.jdl"
-                err_file = f"condor/{args.processor}/{args.tag}/logs/{args.year}_{sample}_{i}.err"
+                jdl_file = (
+                    f"{user_condor_dir}/{args.processor}/{args.tag}/{args.year}_{sample}_{i}.jdl"
+                )
+                err_file = f"{user_condor_dir}/{args.processor}/{args.tag}/logs/{args.year}_{sample}_{i}.err"
                 print(jdl_file)
                 missing_files.append(jdl_file)
                 err_files.append(err_file)
@@ -114,8 +120,10 @@
             continue

         print_red(f"Missing output pickle #{i} for sample {sample}")
-        jdl_file = f"condor/{args.processor}/{args.tag}/{args.year}_{sample}_{i}.jdl"
-        err_file = f"condor/{args.processor}/{args.tag}/logs/{args.year}_{sample}_{i}.err"
+        jdl_file = f"{user_condor_dir}/{args.processor}/{args.tag}/{args.year}_{sample}_{i}.jdl"
+        err_file = (
+            f"{user_condor_dir}/{args.processor}/{args.tag}/logs/{args.year}_{sample}_{i}.err"
+        )
         missing_files.append(jdl_file)
         err_files.append(err_file)
 if args.submit_missing:
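
Note (not part of the patch): the rewritten jdl_dict block in check_jobs.py above replaces a dict comprehension that assumed every sample folder in EOS has at least one submitted .jdl file; with an empty match list, np.sort([...])[-1] raises an IndexError. Below is a minimal, self-contained sketch of the same counting logic, assuming .jdl files named "<year>_<sample>_<jobindex>.jdl" as in the jdl_file strings above; the year, sample names, and file list here are made up for illustration.

import numpy as np

# hypothetical inputs; in check_jobs.py these come from EOS and the user's condor directory
year = "2016APV"
samples = ["JetHT2016", "QCD_HT700to1000", "EmptySample"]
jdls = [
    "2016APV_JetHT2016_0.jdl",
    "2016APV_JetHT2016_1.jdl",
    "2016APV_QCD_HT700to1000_0.jdl",
]

jdl_dict = {}
for sample in samples.copy():
    # job indices of all .jdl files submitted for this year and sample
    sorted_jdls = np.sort(
        [
            int(jdl[:-4].split("_")[-1])
            for jdl in jdls
            if jdl.split("_")[0] == year and "_".join(jdl.split("_")[1:-1]) == sample
        ]
    )

    if len(sorted_jdls):
        # highest job index + 1 = number of output files expected for this sample
        jdl_dict[sample] = sorted_jdls[-1] + 1
    else:
        # folder exists in EOS but no jobs were submitted; drop it from the check
        samples.remove(sample)

print(jdl_dict)  # JetHT2016 -> 2 expected outputs, QCD_HT700to1000 -> 1; EmptySample dropped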