pattern_collection.sh
#!/bin/bash
# Since this manipulates a lot of data in the working directory,
# it's imperative that only a single instance of this script is
# running at any time.
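# One possible way to enforce that (not part of the original workflow, and
# assuming the util-linux flock(1) utility is available) is to take an
# exclusive lock on a lock file before doing anything else, e.g.:
#exec 9>/tmp/pattern_collection.lock
#flock -n 9 || { echo "another instance is already running" >&2; exit 1; }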
shopt -s extglob
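# extglob is needed for the +([0-9]) patterns used below to match the numbered split files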
# generate the experiment files
parameterFiles=($(swipl -g "generate_experiment(Experiment_File), write(Experiment_File), halt" -l experiment_generator.pl | tr -d '[]' | tr ',' ' '))
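# The goal above prints a Prolog list of the generated experiment files, e.g.
# something like [exp1.pl,exp2.pl] (the exact names depend on experiment_generator.pl);
# tr strips the brackets and commas so bash can word-split the result into an array.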
# The number of pieces into which to split each experiment.
# This number has to be at least as high as (ideally equal to) the
# number of cores/threads you wish to use simultaneously.
parts="$1"
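# Optional sanity check (not in the original script): fail early when the part
# count is missing or not a positive integer, since split/2 below expects a number.
# <parts>, <parameter_copy> and <output_copy> are just illustrative names for $1, $2 and $3.
#case "$parts" in
#''|*[!0-9]*) echo "usage: $0 <parts> <parameter_copy> <output_copy>" >&2; exit 1;;
#esac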
# the archival directory storing the output and the experiment file
archivedir=output
# NOTE that originally experiment_generator.pl generated multiple experiments at once,
# hence the iteration here. In the latest version we assume only one set of parameters
# is processed at a time, which makes this iteration redundant.
# For ease of use, we also copy $element to the filepath given as $2 and the merged
# output to the filepath given as $3. If this iteration were ever reinstated,
# those copies would have to be changed.
for element in "${parameterFiles[@]}";
do
# clean the working directory
#rm -f *.pl.split*
# split the experiment into $parts pieces
swipl -g "split(${parts}, '${element}'), halt" -l experiment_splitter.pl
split_output_files=()
# generate a file containing the commands to be executed
for split_file in ${element}.split+([0-9]);
do
split_output_files+=("'${split_file}.output'")
quoted_file="'${split_file}'"
echo "consult(experiment_framework), generate_experiment(${quoted_file}), halt" >> argfile
done
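# Each argfile line is a Prolog goal of the form
#   consult(experiment_framework), generate_experiment('<element>.split<N>'), halt
# so every parallel job processes exactly one split file.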
# run the commands in parallel
#parallel --gnu --ungroup --joblog joblog --workdir $workdir --sshloginfile nodes2 --delay 1 "yap -z" :::: argfile
parallel --progress --gnu --ungroup --delay 1 "yap -z " :::: argfile
# merge the output into a single file
split_output_files_list=$(IFS=,; echo "[${split_output_files[*]}]")
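# This yields a Prolog list of quoted atoms, e.g.
# ['<element>.split0.output','<element>.split1.output'],
# which is passed as the first argument of merge/2 below.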
# for some reason YAP ends up hanging on the findall/3, so we stick to swipl instead
swipl -g "consult(experiment_merger), merge(${split_output_files_list}, new_final_output_file), halt"
#yap -z "consult(experiment_merger), merge(${split_output_files_list}), halt" > new_final_output_file
# generate a random directory to write the output in
unique_directory=$(head -c 500 /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
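# i.e. a random 20-character alphanumeric name, so repeated runs archive their
# results into distinct directories.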
mkdir -p $archivedir/$unique_directory
cp $element "$2"
cp new_final_output_file "$3"
mv new_final_output_file $archivedir/${unique_directory}/output
mv $element $archivedir/${unique_directory}/
# clean the working directory
##mkdir ${unique_directory}
for split_file in ${element}.split+([0-9]);
do
##echo 'not removing file'
##mv $split_file ${unique_directory}/
##mv ${split_file}.output ${unique_directory}/
rm $split_file
rm ${split_file}.output
done
rm argfile
done
shopt -u extglob