sbatch_generator.py 16.3 KB
Newer Older
Jaime Arias's avatar
Jaime Arias committed
1
2
3
#!/usr/bin/env python3

import os
Jaime Arias's avatar
Jaime Arias committed
4
import stat
Jaime Arias's avatar
Jaime Arias committed
5
6
import sys
from collections import ChainMap
Jaime Arias's avatar
Jaime Arias committed
7
8
9
from functools import reduce
from itertools import product
from time import time
Jaime Arias's avatar
Jaime Arias committed
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24

# Template of a SLURM batch header; placeholders are filled with str.format
# (experiment_name, model_name, nodes, threads) by generate_sbatch().
sbatch_header = """\
#!/bin/bash
#
#SBATCH --job-name={experiment_name}_{model_name}_n{nodes}-th{threads}
#SBATCH --ntasks={nodes}
#SBATCH --cpus-per-task={threads}

# Load openmpi module
module load gcc/8.3.0/openmpi/3.1.4

# Experiments

"""

Jaime Arias's avatar
Jaime Arias committed
25
26
27
oar_header = """\
#!/bin/bash
#
Jaime Arias's avatar
Jaime Arias committed
28
#OAR --name {experiment_name}
Jaime Arias's avatar
Jaime Arias committed
29
30
31
32
33
34
35
36
#OAR --resource /nodes={nodes}/cpu=1/core={threads},walltime={timeout}
#OAR --stderr {error_file}
#OAR --stdout {output_file}

# Experiments

"""

Jaime Arias's avatar
Jaime Arias committed
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78

def create_folder(path):
    """Creates a folder (including parents) if it does not exist

    Parameters
    ----------
    path : str
        Path of the new folder

    Examples
    --------

    >>> create_folder('./results')
    """
    # exist_ok=True avoids the race between an existence check and the
    # creation (the folder could appear in between), and is a no-op when
    # the folder is already there.
    os.makedirs(path, exist_ok=True)


def generate_model_instance_path(model_name, model_instance, extension, paths):
    """Return the absolute path of a model instance file.

    Parameters
    ----------
    model_name : str
        Name of the model
    model_instance : str
        Instance of the model
    extension : str
        File extension of the model instance
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Absolute path of the model instance
    """
    filename = f"{model_instance}.{extension}"
    return os.path.join(paths["models"], model_name, filename)


Jaime Arias's avatar
Jaime Arias committed
79
def generate_formula_path(identifier, extension, model_name, model_instance, paths):
    """Return the absolute path of a formula file.

    Parameters
    ----------
    identifier : int
        Formula identifier
    extension : str
        Formula extension
    model_name : str
        Model that verifies the formula
    model_instance : str
        Instance of the model that verifies the formula
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Formula path
    """
    filename = f"{model_instance}-{identifier}.{extension}"
    return os.path.join(paths["formulas"], model_name, model_instance, filename)


Jaime Arias's avatar
Jaime Arias committed
105
def pmcsog_run(parameters, threads, model_name, model_instance, formula, paths):
    """Build the shell command that runs pmc-sog on one formula.

    Parameters
    ----------
    parameters : dict
        Dictionary with the parameters for pmc-sog
    threads : int
        Number of threads
    model_name : str
        Name of the model
    model_instance : str
        Name of the model instance
    formula : int
        Identifier of the formula to be verified
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Command to execute pmc-sog
    """
    binary = os.path.join(paths["tools"], "pmc-sog")
    formula_file = generate_formula_path(
        formula, "ltl.reduced", model_name, model_instance, paths
    )
    model_file = generate_model_instance_path(model_name, model_instance, "net", paths)

    mode = parameters["parallelisation"]
    strategy = parameters["strategy"].strip()
    # 'default' means no explicit algorithm argument; anything else is
    # quoted because strategy names may contain spaces, e.g. "Cou99(poprem shy)"
    algorithm = "" if strategy == "default" else '"{}"'.format(strategy)

    return f"{binary} {mode} {threads} {model_file} {formula_file} {algorithm}"


Jaime Arias's avatar
Jaime Arias committed
141
def ltsmin_run(parameters, threads, model_name, model_instance, formula, paths):
    """Build the shell command that runs pnml2lts-mc on one formula.

    Parameters
    ----------
    parameters : dict
        Dictionary with the parameters for pnml2lts-mc
    threads : int
        Number of threads
    model_name : str
        Name of the model
    model_instance : str
        Name of the model instance
    formula : int
        Identifier of the formula to be verified
    paths : dict
        Dictionary with the paths of the project

    Returns
    -------
    str
        Command to execute pnml2lts-mc
    """
    binary = os.path.join(paths["tools"], "pnml2lts-mc")
    formula_file = generate_formula_path(formula, "ltl", model_name, model_instance, paths)
    model_file = generate_model_instance_path(model_name, model_instance, "pnml", paths)

    options = (
        f"--strategy={parameters['strategy']} "
        f"--size={parameters['size']} "
        f"--threads={threads}"
    )

    return f"{binary} {options} --ltl={formula_file} {model_file}"


Jaime Arias's avatar
Jaime Arias committed
174
def tool_command(tool_dict, threads, model_name, model_instance, formula, paths):
    """Factory method that returns the correct command depending on the tool

    Parameters
    ----------
    tool_dict : dict
        Dictionary containing the parameters of the tool
    threads : int
        Number of threads
    model_name : str
        Name of the model
    model_instance : str
        Name of the model instance
    formula : int
        Identifier of the formula
    paths : dict
        Dictionary with paths of the project

    Returns
    -------
    str
        Command of the tool
    """
    name = tool_dict["name"]
    params = tool_dict["parameters"]

    # dispatch table: tool name -> command builder
    runners = {
        "pmc-sog": pmcsog_run,
        "pnml2lts-mc": ltsmin_run,
    }

    runner = runners.get(name)
    if runner is None:
        sys.exit("{} is not handled yet".format(name))

    return runner(params, threads, model_name, model_instance, formula, paths)


def srun(command, nodes, threads, timeout, job_name, output_folder):
    """Generates the string to execute a task on the cluster

    Parameters
    ----------
    command : str
        Command to be executed
    nodes : int
        Number of nodes used to run the task
    threads : int
        Number of threads used to run the task
    timeout : int
        Time limit passed verbatim to srun's --time option
        (a bare integer is interpreted by SLURM as minutes)
    job_name : str
        Name of the task
    output_folder : str
        absolute path where the logs will be saved

    Returns
    -------
    str
        SRUN command
    """
    error_file = f"{output_folder}/{job_name}.err"
    output_file = f"{output_folder}/{job_name}.out"
    return f"srun -n {nodes} --resv-ports --cpus-per-task={threads} --time={timeout} --output={output_file} --error={error_file} --job-name={job_name} {command}"


Jaime Arias's avatar
Jaime Arias committed
241
242
243
244
245
246
247
def mpi_run(command, nodes, threads, timeout_minutes, job_name, output_folder):
    """Generates the string to execute a task with mpirun under OAR

    Parameters
    ----------
    command : str
        Command to be executed
    nodes : int
        Number of processes per node (passed to --npernode)
    threads : int
        Number of cpus bound to each process (passed to -cpus-per-proc)
    timeout_minutes : int
        Timeout of the task in minutes (converted to seconds for mpirun)
    job_name : str
        Name of the task (used to name the log files)
    output_folder : str
        Absolute path where the logs will be saved

    Returns
    -------
    str
        mpirun command with stdout/stderr redirected to the log files
    """
    error_file = f"{output_folder}/{job_name}.err"
    output_file = f"{output_folder}/{job_name}.out"
    # mpirun's --timeout expects seconds
    timeout = timeout_minutes * 60
    return f"mpirun -machinefile $OAR_NODEFILE --npernode {nodes} -cpus-per-proc {threads} --timeout {timeout} {command}  > {output_file} 2>{error_file}"


Jaime Arias's avatar
Jaime Arias committed
248
249
250
251
252
def generate_experiment_name(tool_dict):
    """Generate the name of the experiment from the tool and its parameters."""
    tool_name = tool_dict["name"]
    params = tool_dict["parameters"]

    # only pmc-sog has a parallelisation mode to encode in the name
    label = params["parallelisation"] + "_" if tool_name == "pmc-sog" else ""
    label += params["strategy"]

    # normalise the strategy string so it is safe to embed in file names
    for old, new in (("(poprem)", "-default"), ("(poprem shy)", "-shy"), ("Cou", "couv")):
        label = label.replace(old, new)

    return f"{tool_name}_{label}"


Jaime Arias's avatar
Jaime Arias committed
268
269
270
271
272
273
274
275
276
277
def generate_oar(
    tool_dict,
    nodes,
    threads,
    model_dict,
    formulas_ids,
    timeout,
    paths,
    oar_timeout="2:00:00",
):
    """Generates an OAR batch script per model instance and makes it executable

    Each script contains the OAR header followed by one mpirun command per
    formula to be verified.

    Parameters
    ---------
    tool_dict : dict
        Dictionary with an instance of a tool
    nodes : int
        Number of nodes
    threads : int
        Number of threads
    model_dict : dict
        Dictionary with a model information
    formulas_ids : list of int
        List of ids of the formulas to be verified
    timeout : int
        Timeout in minutes of each verification (passed to mpi_run)
    paths : dict
        Dictionary with the paths of the project
    oar_timeout : str, optional
        Walltime requested in the OAR header, as "H:MM:SS" (default "2:00:00")
    """
    tool_name = tool_dict["name"]
    model_name = model_dict["name"]
    model_instances = model_dict["instances"]

    experiment_name = generate_experiment_name(tool_dict)

    # folder where oar scripts will be saved
    oar_folder = os.path.join(
        paths["oar"], "experiments", tool_name, experiment_name, model_name
    )
    create_folder(oar_folder)

    # one OAR script per model instance
    for model_instance in model_instances:
        oar_job = f"{experiment_name}_{model_instance}_n{nodes}-th{threads}"
        oar_file = os.path.join(oar_folder, f"{oar_job}.oar")

        # folder where the outputs will be saved
        output_folder = os.path.join(
            paths["results"], tool_name, experiment_name, model_name, model_instance
        )
        create_folder(output_folder)

        error_file = f"{output_folder}/{oar_job}.err"
        output_file = f"{output_folder}/{oar_job}.out"

        # model_instance is not a placeholder of oar_header; str.format
        # silently ignores the extra keyword argument
        header = oar_header.format(
            experiment_name=oar_job,
            model_instance=model_instance,
            nodes=nodes,
            threads=threads,
            timeout=oar_timeout,
            error_file=error_file,
            output_file=output_file,
        )

        with open(oar_file, "w") as f:
            f.write(header)

            # one mpirun line per formula, separated by blank lines
            for formula in formulas_ids:
                command = tool_command(
                    tool_dict, threads, model_name, model_instance, formula, paths
                )

                job_name = f"{oar_job}-f{formula}"

                mpi_command = mpi_run(
                    command, nodes, threads, timeout, job_name, output_folder
                )

                f.write(mpi_command)
                f.write("\n\n")

        #  give oar script the exec right
        st = os.stat(oar_file)
        os.chmod(oar_file, st.st_mode | stat.S_IEXEC)


Jaime Arias's avatar
Jaime Arias committed
355
356
357
def generate_sbatch(
    tool_dict, nodes, threads, model_dict, formulas_ids, timeout, paths
):
    """Generates a slurm batch of a experiment to be executed on the cluster

    Parameters
    ---------
    tool_dict : dict
        Dictionary with an instance of a tool
    nodes : int
        Number of nodes
    threads : int
        Number of threads
    model_dict : dict
        Dictionary with a model information
    formulas_ids : list of int
        List of ids of the formulas to be verified
    timeout : int
        Timeout of the experiment (passed to srun's --time)
    paths : dict
        Dictionary with the paths of the project
    """
    tool_name = tool_dict["name"]
    model_name = model_dict["name"]
    model_instances = model_dict["instances"]

    experiment_name = generate_experiment_name(tool_dict)

    header = sbatch_header.format(
        experiment_name=experiment_name,
        model_name=model_name,
        nodes=nodes,
        threads=threads,
    )

    # folder where the sbatch scripts will be saved
    sbatch_folder = os.path.join(
        paths["slurm"], "experiments", tool_name, experiment_name, model_name
    )
    create_folder(sbatch_folder)

    sbatch_name = f"n{nodes}-th{threads}.sbatch"
    # keep the path and the open file handle under distinct names so the
    # path is not shadowed by the handle inside the `with` block
    sbatch_path = os.path.join(sbatch_folder, sbatch_name)

    with open(sbatch_path, "w") as sbatch_file:
        sbatch_file.write(header)

        # print srun command for each model_instance
        for model_instance in model_instances:
            output_folder = os.path.join(
                paths["results"], tool_name, experiment_name, model_name, model_instance
            )
            create_folder(output_folder)

            for formula in formulas_ids:
                command = tool_command(
                    tool_dict, threads, model_name, model_instance, formula, paths
                )

                job_name = (
                    f"{tool_name}_{model_instance}-n{nodes}-th{threads}-f{formula}"
                )

                srun_command = srun(
                    command, nodes, threads, timeout, job_name, output_folder
                )

                sbatch_file.write(srun_command)
                sbatch_file.write("\n\n")


def create_default_paths():
    """Create the default folder layout of the project and return its paths.

    Returns
    -------
    dict
        Mapping from a logical name (project, formulas, models, results,
        slurm, oar, tools) to the corresponding absolute folder path.
    """
    # project root is the parent of the folder containing this script
    base_folder = os.path.abspath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir)
    )

    # sub-folders of the project, keyed by their logical name:
    # formulas and models are inputs; results, slurm and oar hold the
    # generated outputs; tools contains the verifier binaries
    subfolders = ("formulas", "models", "results", "slurm", "oar", "tools")

    paths = {"project": base_folder}
    for name in subfolders:
        paths[name] = os.path.join(base_folder, name)

    # Create paths if they don't exist
    for folder in paths.values():
        create_folder(folder)

    return paths


def explode_tool(tool):
    """Expand a tool spec into one dictionary per combination of parameters.

    Each parameter of ``tool["parameters"]`` maps to a list of candidate
    values; the result enumerates the Cartesian product of those lists.
    """
    # one list of (name, value) pairs per parameter
    choices = [
        [(name, value) for value in values]
        for name, values in tool["parameters"].items()
    ]

    exploded = []
    for combination in product(*choices):
        exploded.append(
            {"name": tool["name"], "parameters": dict(combination)}
        )

    return exploded


Jaime Arias's avatar
Jaime Arias committed
470
471
472
def generate_multiple_batchs(
    tools, models, formulas, nodes_list, threads_list, timeout, paths, launcher
):
    """Generates the batch scripts for several experiments

    Parameters
    ----------
    tools : dict
        Dictionary with all the tools and their parameters
    models : dict
        Dictionary with all the models and their instances
    formulas : list of int
        List with the formula identifiers to be verified
    nodes_list : list of int
        List with all the nodes to be used
    threads_list : list of int
        List with all the threads to be used
    timeout : int
        Time in minutes of each experiment
    paths : dict
        Dictionary with the paths of the project
    launcher : str
        Batch scheduler to target: "slurm" or "oar"
    """
    for tool_dict in tools:
        # one concrete tool configuration per combination of parameters
        tools_dict = explode_tool(tool_dict)
        for tool in tools_dict:
            print(tool)
            for model in models:
                for nodes in nodes_list:
                    for threads in threads_list:
                        if launcher == "slurm":
                            generate_sbatch(
                                tool, nodes, threads, model, formulas, timeout, paths
                            )
                        elif launcher == "oar":
                            generate_oar(
                                tool, nodes, threads, model, formulas, timeout, paths
                            )
                        else:
                            # exit with a non-zero status so callers can
                            # detect the misconfiguration (sys.exit with a
                            # message prints to stderr and exits with 1)
                            sys.exit(f"{launcher} is not supported")
Jaime Arias's avatar
Jaime Arias committed
512
513


Jaime Arias's avatar
Jaime Arias committed
514
if __name__ == "__main__":
    # Create (if needed) and collect the default project folders
    paths = create_default_paths()

    # Batch scheduler to target: "slurm" or "oar"
    launcher = "oar"

    # Timeout of each verification, in minutes
    timeout = 20

    # Number of nodes to try
    nodes = [1, 2]

    # Number of threads to try
    threads = [16]

    # Formulas to be verified (identifiers 1..nb_formulas)
    nb_formulas = 200
    formulas = [n for n in range(1, nb_formulas + 1)]

    # Models to be run (commented entries are alternative benchmarks)
    models = [
        {
            #     "name": "philo",
            #     "instances": ["philo5", "philo10", "philo20"]
            # }, {
            #     "name": "train",
            #     "instances": ["train12", "train24", "train48", "train96"]
            # }, {
            "name": "tring",
            "instances": ["tring10"]  # "tring5", "tring20"
            # }, {
            #     "name":
            #     "robot",
            #     "instances": ["robot20"] #"robot20", "robot50", "robot2", "robot5", "robot10"]
            # }, {
            #   "name": "spool",
            #    "instances": ["spool4", "spool5"] #, "spool1", "spool2", "spool3"]
        }
    ]

    # Tools to be compared; each parameter lists the values to explode into
    # one experiment per combination (see explode_tool)
    tools = [
        {
            "name": "pmc-sog",
            "parameters": {
                "parallelisation": [
                    "otfPOR",
                    "otfPRPOR",
                ],  # 'otf', 'otfPR', 'otfP', 'otfC',
                "strategy": ["Cou99(poprem)", "Cou99(poprem shy)"],  # , 'default']
            }
            #    }, {
            #        "name": "pnml2lts-mc",
            #        "parameters": {
            #            "size": ["90%"],
            #            "strategy": ['dfs', 'ndfs']
            #        }
        }
    ]

    generate_multiple_batchs(
        tools, models, formulas, nodes, threads, timeout, paths, launcher
    )