Commit a589976d authored by iker_martin

Modified scripts to enable the use of lists of strategies in complex config files

parent cf716c90
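The change lets a single key in the source config file carry several values. A minimal sketch of the intended syntax, using a hypothetical key name:

Spawn_Strategy=1:2,3

Here ':' separates alternative values that are swapped between the generated config files, while ',' keeps a comma-separated list inside one generated file.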
@@ -8,6 +8,8 @@ GENERAL_SECTION = "[general]"
RESIZE_SECTION = "[resize"
STAGE_SECTION = "[stage"
END_SECTION_DELIMITER = ";end"
DIFFERENT_VALUE_DELIMITER=':'
LIST_VALUE_DELIMITER=','
class Config_section(Enum):
INVALID=0
@@ -61,29 +63,36 @@ def is_a_stage_section(line):
return True
return False
def convert_to_number(number):
res = None
try:
res = float(number)
if res == int(res):
res = int(res)
except ValueError:
if isinstance(number, str):
res = number
else:
print("Unable to convert to number - Not a fatal error")
return res
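# Illustrative sketch, not part of the commit: integral tokens become int, other numeric
# tokens become float, and non-numeric strings are returned unchanged ("pthread" is only
# a hypothetical token).
assert convert_to_number("3") == 3 and isinstance(convert_to_number("3"), int)
assert convert_to_number("0.25") == 0.25
assert convert_to_number("pthread") == "pthread"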
def process_line(line, data):
key,value = line.split('=')
if(not Config_section.has_key(key)):
print("Unknown parameter " + key)
return False
if(',' in value):
value = value.split(',')
for i in range(len(value)):
try:
value[i] = float(value[i])
if value[i] == int(value[i]):
value[i] = int(value[i])
except ValueError:
print("Unable to convert to number - Not a fatal error")
else:
try:
value = float(value)
if value == int(value):
value = int(value)
except ValueError:
print("Unable to convert to number - Not a fatal error")
value = value.split(DIFFERENT_VALUE_DELIMITER) # Some keys have values that will be swapped between files
for i in range(len(value)):
value[i] = value[i].split(LIST_VALUE_DELIMITER) # Final config files could have multiple values for the same key
for j in range(len(value[i])):
value[i][j] = convert_to_number(value[i][j])
if len(value[i]) > 1:
value[i] = tuple(value[i])
elif len(value[i]) == 1:
value[i] = value[i][j]
if len(value) == 1:
value = value[0]
data[key]=value
return True
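# Illustrative, self-contained sketch of the value parsing above; the keys and values are
# hypothetical and convert_to_number from this module is assumed to be in scope.
def parse_value(value, diff_delim=':', list_delim=','):
    variants = value.split(diff_delim)  # alternatives swapped between generated files
    for i, variant in enumerate(variants):
        items = [convert_to_number(v) for v in variant.split(list_delim)]
        variants[i] = tuple(items) if len(items) > 1 else items[0]
    return variants[0] if len(variants) == 1 else variants

assert parse_value("4") == 4                # a single value stays a scalar
assert parse_value("1:2") == [1, 2]         # two alternatives, one per generated file
assert parse_value("1:2,3") == [1, (2, 3)]  # the ',' group stays together in one file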
@@ -120,58 +129,64 @@ def process_file(file_name):
f.close()
return general_data,stages_data,resizes_data
def key_line_write(f, keys, values):
for i in range(len(keys)):
f.write(keys[i] + "=")
if type(values[i]) == tuple:
f.write(str(values[i][0]))
for j in range(1, len(values[i])):
f.write("," + str(values[i][j]))
else:
f.write(str(values[i]))
f.write("\n")
def general_section_write(f, general_data):
f.write(GENERAL_SECTION + "\n")
keys = list(general_data.keys())
values = list(general_data.values())
for i in range(len(keys)):
f.write(keys[i] + "=" + str(values[i]) + "\n")
key_line_write(f, keys, values)
f.write(END_SECTION_DELIMITER + " " + GENERAL_SECTION + "\n")
def stage_section_write(f, stage_data, section_index):
f.write(STAGE_SECTION + str(section_index) + "]\n")
keys = list(stage_data.keys())
values = list(stage_data.values())
for i in range(len(keys)):
f.write(keys[i] + "=" + str(values[i]) + "\n")
key_line_write(f, keys, values)
f.write(END_SECTION_DELIMITER + " " + STAGE_SECTION + str(section_index) + "]\n")
def resize_section_write(f, resize_data, section_index):
f.write(RESIZE_SECTION + str(section_index) + "]\n")
keys = list(resize_data.keys())
values = list(resize_data.values())
for i in range(len(keys)):
f.write(keys[i] + "=" + str(values[i]) + "\n")
key_line_write(f, keys, values)
f.write(END_SECTION_DELIMITER + " " + RESIZE_SECTION + str(section_index) + "]\n")
def write_output_file(datasets, common_output_name, output_index):
file_name = common_output_name + str(output_index) + ".ini"
total_stages=int(datasets[0][Config_section.P_TOTAL_STAGES.value])
total_resizes=int(datasets[0][Config_section.P_TOTAL_RESIZES.value])+1
total_groups=int(datasets[0][Config_section.P_TOTAL_RESIZES.value])+1
f = open(file_name, "w")
general_section_write(f, datasets[0])
for i in range(total_stages):
stage_section_write(f, datasets[i+1], i)
for i in range(total_resizes):
for i in range(total_groups):
resize_section_write(f, datasets[i+1+total_stages], i)
f.close()
def check_sections_assumptions(datasets):
total_resizes=int(datasets[0][Config_section.P_TOTAL_RESIZES.value])+1
total_groups=int(datasets[0][Config_section.P_TOTAL_RESIZES.value])+1
total_stages=int(datasets[0][Config_section.P_TOTAL_STAGES.value])
adr = datasets[0][Config_section.P_ADR.value]
for i in range(total_resizes):
#Not valid if trying to use thread strategy and adr(Async data) is 0
if adr==0 and (datasets[total_stages+1+i][Config_section.P_RESIZE_SPAWN_STRATEGY.value] == 2 or datasets[total_stages+1+i][Config_section.P_RESIZE_REDISTRIBUTION_STRATEGY.value] == 2):
return False
#Not valid if the strategies are different
if datasets[total_stages+1+i][Config_section.P_RESIZE_SPAWN_STRATEGY.value] != datasets[total_stages+1+i][Config_section.P_RESIZE_REDISTRIBUTION_STRATEGY.value]:
return False
for i in range(total_groups):
#Not valid if a resize is to the same number of processes
if i>0:
if datasets[total_stages+1+i][Config_section.P_RESIZE_PROCS.value] == datasets[total_stages+i][Config_section.P_RESIZE_PROCS.value]:
@@ -225,14 +240,9 @@ def create_output_files(common_output_name, general_data, resize_data, stage_dat
datasets.append(dataset)
write_datasets.append(dataset.copy())
directory = "/Desglosed-" + str(date.today())
path = os.getcwd() + directory
os.mkdir(path, mode=0o775)
os.chdir(path)
lists=[] # Stores lists of those variables with multiple values
keys=[] # Stores keys of those variables with multiple values
indexes=[] # Stores actual index for each variable with multiple values
indexes=[] # Stores the current index for each variable with multiple values. Always starts at 0.
mindexes=[] # Stores the length of the list of each variable with multiple values
ds_indexes=[] # Stores the index of the dataset where the variable is stored
#For each variable with a list of elements
@@ -247,6 +257,10 @@ def create_output_files(common_output_name, general_data, resize_data, stage_dat
indexes.append(0)
mindexes.append(len(values_aux[j]))
directory = "/Desglosed-" + str(date.today())
path = os.getcwd() + directory
os.mkdir(path, mode=0o775)
os.chdir(path)
#Get the first set of values
for i in range(len(lists)):
@@ -260,7 +274,8 @@ def create_output_files(common_output_name, general_data, resize_data, stage_dat
output_index=0
adr_corrected=False
while True:
finished = False
while not finished:
if(check_sections_assumptions(write_datasets)):
write_output_file(write_datasets, common_output_name, output_index)
# for i in range(len(write_datasets)):
@@ -268,9 +283,7 @@ def create_output_files(common_output_name, general_data, resize_data, stage_dat
# print("\n\n\n------------------------------------------" + str(output_index) + " ADR=" + str(adr_corrected))
output_index+=1
finished = read_parameter(0)
if finished:
break
#=====================================================
if(len(sys.argv) < 3):
print("Not enough arguments given.\nExpected usage: python3 read_multiple.py file.ini output_name")
@@ -46,16 +46,11 @@ then
fi
nodelist=$SLURM_JOB_NODELIST
nodes=$SLURM_JOB_NUM_NODES
if [ -z "$nodelist" ];
then
echo "Internal ERROR in generalRun.sh - Nodelist not provided"
exit -1
fi
if [ -z "$nodes" ];
then
nodes=1
fi
numP=$(bash $dir$execDir/BashScripts/getNumPNeeded.sh $configFile 0)
initial_nodelist=$(bash $dir$execDir/BashScripts/createInitialNodelist.sh $numP $cores $nodelist)
@@ -66,7 +61,7 @@ if [ $use_extrae -ne 1 ]
then
for ((i=0; i<qty; i++))
do
mpirun -hosts $initial_nodelist -np $numP $dir$codeDir/a.out $configFile $outFileIndex $nodelist $nodes
mpirun -hosts $initial_nodelist -np $numP $dir$codeDir/a.out $configFile $outFileIndex
done
else
cp $dir$execDir/Extrae/extrae.xml .
@@ -75,7 +70,7 @@ else
for ((i=0; i<qty; i++))
do
#FIXME Extrae has not been tested with the initial nodelist - Could have some errors
srun -n$numP --mpi=pmi2 ./trace.sh $dir$codeDir/a.out $configFile $outFileIndex $nodelist $nodes
srun -n$numP --mpi=pmi2 ./trace.sh $dir$codeDir/a.out $configFile $outFileIndex
done
fi
@@ -55,7 +55,6 @@ fi
numP=$(bash $dir$execDir/BashScripts/getNumPNeeded.sh $configFile 0)
nodelist=$SLURM_JOB_NODELIST
nodes=$SLURM_JOB_NUM_NODES
if [ -z "$nodelist" ];
then
nodelist="localhost"
@@ -63,10 +62,6 @@ then
else
initial_nodelist=$(bash $dir$execDir/BashScripts/createInitialNodelist.sh $numP $cores $nodelist)
fi
if [ -z "$nodes" ];
then
nodes=1
fi
#EXECUTE RUN
echo "Nodes=$nodelist"
@@ -74,7 +69,7 @@ if [ $use_extrae -ne 1 ]
then
for ((i=0; i<qty; i++))
do
mpirun -hosts $initial_nodelist -np $numP $dir$codeDir/a.out $configFile $outFileIndex $nodelist $nodes
mpirun -hosts $initial_nodelist -np $numP $dir$codeDir/a.out $configFile $outFileIndex
done
else
cp $dir$execDir/Extrae/extrae.xml .
@@ -82,7 +77,7 @@ else
cp $dir$execDir/Extrae/trace_worker.sh .
for ((i=0; i<qty; i++))
do
mpirun -hosts $initial_nodelist -np $numP ./trace.sh $dir$codeDir/a.out $configFile $outFileIndex $nodelist $nodes
mpirun -hosts $initial_nodelist -np $numP ./trace.sh $dir$codeDir/a.out $configFile $outFileIndex
done
fi