From fdfe917350023cd2179514e81cecb0d4674bc23c Mon Sep 17 00:00:00 2001
From: Camille Coti <camille.coti@lipn.univ-paris13.fr>
Date: Mon, 29 Jul 2019 17:24:18 -0700
Subject: [PATCH] Seems to fix the deadlock

---
 src/hierarchical.cpp | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/src/hierarchical.cpp b/src/hierarchical.cpp
index a912afb..b5d86f3 100644
--- a/src/hierarchical.cpp
+++ b/src/hierarchical.cpp
@@ -61,7 +61,7 @@ void create_communicators_hierarch( MPI_Comm& COMM_FOREMEN, MPI_Comm& COMM_TEAM
 }
 
 
-gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_int_t& J, int size, parameters_t params, gi::lst symbols, MPI_Comm comm_team ) {
+gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_int_t& J, int size, parameters_t params, gi::lst symbols, MPI_Comm comm_team, int rank_foreman /* DEBUG */ ) {
     gi::ex Tens = 0;
     gi::ex A;
 
@@ -149,7 +149,7 @@ gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_i
     running = np - 1; // all the slaves are running
     while( running > 0 ) {
         /* Here we might also receive a TAG_PULL if the data set is too small */
-        MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, MPI_ANY_TAG/* TAG_RES*/, comm_team, &status );
+        MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, /*MPI_ANY_TAG/*/ TAG_RES, comm_team, &status );
         src = status.MPI_SOURCE;
 
         if( len != 0 ) {
@@ -178,7 +178,6 @@ gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_i
     }
 
 
-    if( NULL != expr_c) free( expr_c );
     return Tens;
 }
 
@@ -231,8 +230,8 @@ void multiply_1level_foreman_hierarch( tensor3D_t& T, matrix_int_t& J, int size,
 
         if( status.MPI_TAG == TAG_WORK ){
             /* Distribute the work on my workers */
-
-            Tens = multiply_1level_foreman_hierarch_distribute_work( T, J, size, params, symbols, comm_team );
+            
+            Tens = multiply_1level_foreman_hierarch_distribute_work( T, J, size, params, symbols, comm_team, rank /* DEBUG */ );
 
             /* Send the result to the master */
 
--
GitLab
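
Note on the fix: the substantive change is in the foreman's result-collection loop, where the MPI_Recv now matches only TAG_RES instead of MPI_ANY_TAG, so the loop can no longer consume an unrelated message (e.g. a TAG_PULL request) and lose track of how many workers are still running. Below is a minimal, self-contained sketch of that receive pattern; it is illustrative only, not the project's code, and the tag values and dummy payload are assumptions made for this example.

/* Minimal sketch, not the project's code: a foreman hands out work, then
 * collects one result notification per worker, matching only TAG_RES
 * (instead of MPI_ANY_TAG) so no unrelated message is consumed by mistake.
 * Tag values and the dummy payload are assumptions for illustration. */
#include <mpi.h>
#include <cstdio>

static const int TAG_WORK = 1;   /* assumed tag values */
static const int TAG_RES  = 2;
static const int TAG_END  = 3;

int main( int argc, char** argv ) {
    MPI_Init( &argc, &argv );
    MPI_Comm comm_team = MPI_COMM_WORLD;
    int rank, np;
    MPI_Comm_rank( comm_team, &rank );
    MPI_Comm_size( comm_team, &np );

    if( 0 == rank ) {
        /* Foreman: send one dummy work unit to each worker */
        int work = 42;
        for( int dst = 1 ; dst < np ; dst++ )
            MPI_Send( &work, 1, MPI_INT, dst, TAG_WORK, comm_team );

        /* Collect results: the explicit TAG_RES here mirrors the patch;
           MPI_ANY_TAG could also match a pull request and break the
           bookkeeping of how many workers are still running. */
        int running = np - 1;
        while( running > 0 ) {
            unsigned int len;
            MPI_Status status;
            MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, TAG_RES, comm_team, &status );
            printf( "Result of length %u from rank %d\n", len, status.MPI_SOURCE );
            running--;
        }

        /* Tell the workers to stop */
        for( int dst = 1 ; dst < np ; dst++ )
            MPI_Send( &work, 1, MPI_INT, dst, TAG_END, comm_team );
    } else {
        /* Worker: receive a work unit, send back the length of a (dummy) result */
        int work;
        MPI_Status status;
        MPI_Recv( &work, 1, MPI_INT, 0, TAG_WORK, comm_team, &status );
        unsigned int len = 0;
        MPI_Send( &len, 1, MPI_UNSIGNED, 0, TAG_RES, comm_team );
        MPI_Recv( &work, 1, MPI_INT, 0, TAG_END, comm_team, &status );
    }

    MPI_Finalize();
    return 0;
}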