From 95072d94aeffbb45a4f4579290e7cb1da206d621 Mon Sep 17 00:00:00 2001
From: Camille Coti <camille.coti@lipn.univ-paris13.fr>
Date: Tue, 30 Jul 2019 16:16:42 -0700
Subject: [PATCH] Fixed the communicator construction when the teams have different sizes

---
 src/hierarchical.cpp | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/src/hierarchical.cpp b/src/hierarchical.cpp
index b5d86f3..6104e10 100644
--- a/src/hierarchical.cpp
+++ b/src/hierarchical.cpp
@@ -1,6 +1,7 @@
 #include <iostream>
 #include <mpi.h>
 #include <ginac/ginac.h>
+#include <math.h> // ceil
 
 #include "products.h"
 #include "utils_parall.h"
@@ -23,7 +24,7 @@ void create_communicators_hierarch( MPI_Comm& COMM_FOREMEN, MPI_Comm& COMM_TEAM
        color_foreman is set to 1 if I am the root or a foreman */
 
     int color_foreman, workers_per_foreman;
-    workers_per_foreman = ( np - 1) / NBFOREMEN;
+    workers_per_foreman = ceil( (double)( np - 1) / (double)NBFOREMEN );
     if( ROOT == rank ) {
         color_foreman = 1;
     } else {
@@ -42,10 +43,8 @@ void create_communicators_hierarch( MPI_Comm& COMM_FOREMEN, MPI_Comm& COMM_TEAM
     if( ROOT == rank ) {
         color_team = 0;
     } else {
-        workers_per_foreman = ( np - 1) / NBFOREMEN;
-        color_team = 1 + ( (rank-1) / workers_per_foreman );
+        color_team = 1 + floor( ( (rank-1) / workers_per_foreman ) );
     }
-
     MPI_Comm_split( MPI_COMM_WORLD, color_team, rank, &COMM_TEAM );
 
 #if DEBUG
@@ -99,7 +98,7 @@ gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_
     }
 
     /* Distribute the work */
-    /* Very copy/paste from multiply_1level_master -> possible refactoing here */
+    /* Very copy/paste from multiply_1level_master -> possible refactoring here */
     while( input.size() > 0 ) {
         MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, MPI_ANY_TAG, comm_team, &status );
--
GitLab
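
Illustration (not part of the patch): a minimal standalone sketch of why the ceiling division matters, reproducing only the team-color arithmetic with no MPI calls. The values np = 10 and NBFOREMEN = 4 below are arbitrary example values, not taken from the project.

// Compares the old floor-based team color with the new ceil-based one
// when (np - 1) is not a multiple of NBFOREMEN.
#include <cmath>    // std::ceil
#include <iostream>

int main() {
    const int np = 10;       // example: root plus 9 other ranks
    const int NBFOREMEN = 4; // example: 4 foremen

    // Old computation: integer (floor) division
    int wpf_old = ( np - 1 ) / NBFOREMEN;                                     // = 2
    // New computation, as in the patch: ceiling division
    int wpf_new = (int) std::ceil( (double)( np - 1 ) / (double)NBFOREMEN );  // = 3

    for( int rank = 1; rank < np; rank++ ) {
        int color_old = 1 + ( ( rank - 1 ) / wpf_old ); // reaches 5: one more team than there are foremen
        int color_new = 1 + ( ( rank - 1 ) / wpf_new ); // stays in 1..3: every team keeps a foreman
        std::cout << "rank " << rank << ": old color " << color_old
                  << ", new color " << color_new << std::endl;
    }
    return 0;
}

With the old formula, rank 9 would land in team color 5 while only 4 foremen exist, so one team's communicator would have no foreman; the ceiling keeps every color within 1..NBFOREMEN, at the cost of some foremen possibly receiving fewer (or no) workers.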