#!/usr/bin/env python3
"""Generate slurm sbatch files for model-checking benchmark experiments.

For every combination of tool configuration, model, node count and thread
count, one sbatch file is written containing an ``srun`` command per
(model instance, formula) pair.
"""
import os
import sys
from collections import ChainMap
from functools import reduce
from itertools import product

# Header template of every generated sbatch file.
# NOTE(review): the exact blank-line layout of this template was
# reconstructed from a whitespace-mangled source — confirm against a
# previously generated sbatch file.
sbatch_header = """\
#!/bin/bash
#
#SBATCH --job-name={experiment_name}_{model_name}_n{nodes}-th{threads}
#SBATCH --ntasks={nodes}
#SBATCH --cpus-per-task={threads}

# Load openmpi module
module load gcc/8.3.0/openmpi/3.1.4

# Experiments
"""


def create_folder(path):
    """Creates a folder if it does not exist

    Parameters
    ----------
    path : str
        Path of the new folder

    Examples
    --------
    >>> create_folder('./results')
    """
    # exist_ok=True makes the call idempotent and removes the
    # check-then-create race of `os.path.exists` followed by `os.makedirs`.
    os.makedirs(path, exist_ok=True)


def generate_model_instance_path(model_name, model_instance, extension, paths):
    """Generates the absolute path of a model instance

    Parameters
    ----------
    model_name : str
        Name of the model
    model_instance : str
        Instance of the model
    extension : str
        File extension of the model instance
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Absolute path of the model instance
    """
    name = f"{model_instance}.{extension}"
    return os.path.join(paths["models"], model_name, name)


def generate_formula_path(identifier, extension, model_name, model_instance, paths):
    """Generates the absolute path of a formula

    Parameters
    ----------
    identifier : int
        Formula identifier
    extension : str
        Formula extension
    model_name : str
        Model that verifies the formula
    model_instance : str
        Instance of the model that verifies the formula
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Formula path
    """
    name = f"{model_instance}-{identifier}.{extension}"
    return os.path.join(paths["formulas"], model_name, model_instance, name)


def pmcsog_run(parameters, threads, model_name, model_instance, formula, paths):
    """Generates the string with the command to execute pmc-sog

    Parameters
    ----------
    parameters : dict
        Dictionary with the parameters for pmc-sog
    threads : int
        Number of threads
    model_name : str
        Name of the model
    model_instance : str
        Name of the model instance
    formula : int
        Identifier of the formula to be verified
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Command to execute pmc-sog
    """
    formula_path = generate_formula_path(formula, 'ltl.reduced', model_name,
                                         model_instance, paths)
    model = generate_model_instance_path(model_name, model_instance, 'net',
                                         paths)
    tool = os.path.join(paths['tools'], 'pmc-sog')

    parallelisation = parameters['parallelisation']
    algorithm = parameters['strategy'].strip()
    # An explicit strategy is quoted (it may contain spaces/parentheses);
    # "default" passes nothing so the tool picks its own algorithm.
    algorithm = '"{}"'.format(algorithm) if algorithm != "default" else ''

    return f"{tool} {parallelisation} {threads} {model} {formula_path} {algorithm}"


def ltsmin_run(parameters, threads, model_name, model_instance, formula, paths):
    """Generates the string with the command to execute pnml2lts-mc

    Parameters
    ----------
    parameters : dict
        Dictionary with the parameters for pnml2lts-mc
    threads : int
        Number of threads
    model_name : str
        Name of the model
    model_instance : str
        Name of the model instance
    formula : int
        Identifier of the formula to be verified
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Command to execute pnml2lts-mc
    """
    tool = os.path.join(paths['tools'], 'pnml2lts-mc')
    formula_path = generate_formula_path(formula, 'ltl', model_name,
                                         model_instance, paths)
    model = generate_model_instance_path(model_name, model_instance, 'pnml',
                                         paths)

    strategy = parameters["strategy"]
    size = parameters["size"]

    return (f"{tool} --strategy={strategy} --size={size} "
            f"--threads={threads} --ltl={formula_path} {model}")


def tool_command(tool_dict, threads, model_name, model_instance, formula, paths):
    """Factory method that returns the correct command depending on the tool

    Parameters
    ----------
    tool_dict : dict
        Dictionary containing the parameters of the tool
    threads : int
        Number of threads
    model_name : str
        Name of the model
    model_instance : str
        Name of the model instance
    formula : int
        Identifier of the formula
    paths : dict
        Dictionary with paths of the project

    Returns
    -------
    str
        Command of the tool
    """
    tool_name = tool_dict['name']
    tool_parameters = tool_dict['parameters']

    if tool_name == "pmc-sog":
        return pmcsog_run(tool_parameters, threads, model_name,
                          model_instance, formula, paths)
    if tool_name == "pnml2lts-mc":
        return ltsmin_run(tool_parameters, threads, model_name,
                          model_instance, formula, paths)
    # Unknown tool: abort generation with an explanatory message.
    sys.exit("{} is not handled yet".format(tool_name))


def srun(command, nodes, threads, timeout, job_name, output_folder):
    """Generates the string to execute a task on the cluster

    Parameters
    ----------
    command : str
        Command to be executed
    nodes : int
        Number of nodes used to run the task
    threads : int
        Number of threads used to run the task
    timeout : int
        Time limit of the task (slurm --time value, minutes)
    job_name : str
        Name of the task
    output_folder : str
        Absolute path where the logs will be saved

    Returns
    -------
    str
        SRUN command
    """
    error_file = f"{output_folder}/{job_name}.err"
    output_file = f"{output_folder}/{job_name}.out"

    return (f"srun -n {nodes} --resv-ports --cpus-per-task={threads} "
            f"--time={timeout} --output={output_file} --error={error_file} "
            f"--job-name={job_name} {command}")


def generate_experiment_name(tool_dict):
    """Generate the name of the experiment

    Parameters
    ----------
    tool_dict : dict
        Dictionary with an instance of a tool

    Returns
    -------
    str
        Tool name joined with its normalised parameter values
    """
    tool_name = tool_dict["name"]
    tool_params_dict = tool_dict["parameters"]

    # Only pmc-sog carries a parallelisation parameter.
    tool_parameters = (tool_params_dict["parallelisation"] + "_"
                       if tool_name == "pmc-sog" else '')
    tool_parameters += tool_params_dict["strategy"]

    # Rewrite the strategy string into a filesystem-friendly token,
    # e.g. "Cou99(poprem)" -> "couv99-default".
    tool_parameters = reduce((lambda s, v: s.replace(*v)),
                             [['(poprem)', '-default'],
                              ['(poprem shy)', '-shy'],
                              ['Cou', 'couv']],
                             tool_parameters)

    return f"{tool_name}_{tool_parameters}"


def generate_sbatch(tool_dict, nodes, threads, model_dict, formulas_ids,
                    timeout, paths):
    """Generates a slurm batch of a experiment to be executed on the cluster

    Parameters
    ----------
    tool_dict : dict
        Dictionary with an instance of a tool
    nodes : int
        Number of nodes
    threads : int
        Number of threads
    model_dict : dict
        Dictionary with a model information
    formulas_ids : list of int
        List of ids of the formulas to be verified
    timeout : int
        Timeout of the experiment
    paths : dict
        Dictionary with the paths of the project
    """
    tool_name = tool_dict["name"]
    model_name = model_dict['name']
    model_instances = model_dict['instances']

    experiment_name = generate_experiment_name(tool_dict)
    header = sbatch_header.format(experiment_name=experiment_name,
                                  model_name=model_name,
                                  nodes=nodes,
                                  threads=threads)

    sbatch_folder = os.path.join(paths['slurm'], 'experiments', tool_name,
                                 experiment_name, model_name)
    create_folder(sbatch_folder)

    sbatch_name = f"n{nodes}-th{threads}.sbatch"
    # Distinct names for the path and the file object (the original
    # shadowed the path variable with the open file handle).
    sbatch_path = os.path.join(sbatch_folder, sbatch_name)

    with open(sbatch_path, 'w') as sbatch_file:
        sbatch_file.write(header)

        # print srun command for each model_instance
        for model_instance in model_instances:
            output_folder = os.path.join(paths['results'], tool_name,
                                         experiment_name, model_name,
                                         model_instance)
            create_folder(output_folder)

            for formula in formulas_ids:
                command = tool_command(tool_dict, threads, model_name,
                                       model_instance, formula, paths)
                job_name = (f"{tool_name}_{model_instance}"
                            f"-n{nodes}-th{threads}-f{formula}")
                srun_command = srun(command, nodes, threads, timeout,
                                    job_name, output_folder)
                sbatch_file.write(srun_command)
                sbatch_file.write("\n\n")


def create_default_paths():
    """Create the default path for the project

    Returns
    -------
    dict
        Mapping of logical folder names to absolute paths; every folder
        is created if missing.
    """
    base_folder = os.path.abspath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))

    paths = {
        # Absolute path where are stored the formulas, models, and scripts
        'project': base_folder,
        # Folder where the formulas are saved
        'formulas': os.path.join(base_folder, "formulas"),
        # Folder where the models are saved
        'models': os.path.join(base_folder, "models"),
        # Folder where the results will be saved
        'results': os.path.join(base_folder, "results"),
        # Folder where the slurm batches will be saved
        'slurm': os.path.join(base_folder, "slurm"),
        # Folder where the tool are saved
        'tools': os.path.join(base_folder, "tools")
    }

    # Create paths if they don't exist
    for path in paths.values():
        create_folder(path)

    return paths


def explode_tool(tool):
    """Generates a tool dictionary for each combination of parameter values

    Parameters
    ----------
    tool : dict
        Tool whose "parameters" entry maps each parameter name to a list
        of candidate values

    Returns
    -------
    list of dict
        One tool dictionary per element of the Cartesian product of the
        parameter value lists
    """
    parameters = [[{name: value} for value in values]
                  for name, values in tool["parameters"].items()]
    parameters = product(*parameters)

    return [{
        "name": tool["name"],
        "parameters": dict(ChainMap(*parameter))
    } for parameter in parameters]


def generate_multiple_sbatchs(tools, models, formulas, nodes_list,
                              threads_list, timeout, paths):
    """Generates the slurm batch for several experiments

    Parameters
    ----------
    tools : dict
        Dictionary with all the tools and their parameters
    models : dict
        Dictionary with all the models and their instances
    formulas : list of int
        List with the formula identifiers to be verified
    nodes_list : list of int
        List with all the nodes to be used
    threads_list : list of int
        List with all the threads to be used
    timeout : int
        Time in minutes of each experiment
    paths : dict
        Dictionary with the paths of the project
    """
    for tool_dict in tools:
        tools_dict = explode_tool(tool_dict)
        for tool in tools_dict:
            # Progress output: show which tool configuration is generated.
            print(tool)
            for model in models:
                for nodes in nodes_list:
                    for threads in threads_list:
                        generate_sbatch(tool, nodes, threads, model,
                                        formulas, timeout, paths)


if __name__ == '__main__':
    # Default paths
    paths = create_default_paths()

    # Timeout: 10 minutes
    timeout = 10

    # Number of nodes
    nodes = [2]

    # Number of threads
    threads = [8, 16]

    # Formulas to be verified
    nb_formulas = 200
    formulas = list(range(1, nb_formulas + 1))

    # Models to be run
    models = [
        # {
        #     "name": "philo",
        #     "instances": ["philo5", "philo10", "philo20"]
        # },
        # {
        #     "name": "train",
        #     "instances": ["train12", "train24", "train48", "train96"]
        # },
        # {
        #     "name": "tring",
        #     "instances": ["tring5", "tring10", "tring20"]
        # },
        {
            "name": "robot",
            "instances": ["robot20", "robot50"]  # , "robot2", "robot5", "robot10"
        },
        {
            "name": "spool",
            "instances": ["spool4", "spool5"]  # , "spool1", "spool2", "spool3"
        }
    ]

    # Tools to be compared
    tools = [
        {
            "name": "pmc-sog",
            "parameters": {
                # NOTE(review): in the original source, commenting out the
                # extra parallelisation values also swallowed the comma
                # separating the dict entries (a SyntaxError); the comma is
                # restored here.
                "parallelisation": ['otf'],  # 'otfP', 'otfPR', 'otfC'
                "strategy": ['Cou99(poprem)', 'Cou99(poprem shy)']  # , 'default'
            }
        },
        # {
        #     "name": "pnml2lts-mc",
        #     "parameters": {
        #         "size": ["90%"],
        #         "strategy": ['dfs', 'ndfs']
        #     }
        # }
    ]

    generate_multiple_sbatchs(tools, models, formulas, nodes, threads,
                              timeout, paths)