Commit fdfe9173 authored by Camille Coti

Seems to fix the deadlock

parent d3989c32
@@ -61,7 +61,7 @@ void create_communicators_hierarch( MPI_Comm& COMM_FOREMEN, MPI_Comm& COMM_TEAM
}
-gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_int_t& J, int size, parameters_t params, gi::lst symbols, MPI_Comm comm_team ) {
+gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_int_t& J, int size, parameters_t params, gi::lst symbols, MPI_Comm comm_team, int rank_foreman /* DEBUG */ ) {
gi::ex Tens = 0;
gi::ex A;
@@ -149,7 +149,7 @@ gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_i
running = np - 1; // all the slaves are running
while( running > 0 ) {
/* Here we might also receive a TAG_PULL if the data set is too small */
-MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, MPI_ANY_TAG/* TAG_RES*/, comm_team, &status );
+MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, /*MPI_ANY_TAG/*/ TAG_RES, comm_team, &status );
src = status.MPI_SOURCE;
if( len != 0 ) {
@@ -178,7 +178,6 @@ gi::ex multiply_1level_foreman_hierarch_distribute_work( tensor3D_t& T, matrix_i
}
if( NULL != expr_c) free( expr_c );
return Tens;
}
@@ -231,8 +230,8 @@ void multiply_1level_foreman_hierarch( tensor3D_t& T, matrix_int_t& J, int size,
if( status.MPI_TAG == TAG_WORK ){
/* Distribute the work on my workers */
-Tens = multiply_1level_foreman_hierarch_distribute_work( T, J, size, params, symbols, comm_team );
+Tens = multiply_1level_foreman_hierarch_distribute_work( T, J, size, params, symbols, comm_team, rank /* DEBUG */ );
/* Send the result to the master */
......
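
For context, here is a minimal, self-contained sketch (not the repository's code) of the receive-tag pattern that the second hunk changes: the collecting rank matches TAG_RES explicitly instead of MPI_ANY_TAG, so a TAG_PULL request still in flight is not consumed as a result. The tag values, the one-result-per-worker protocol, and the drain loop below are assumptions made purely for illustration.

/* Sketch only: matching a specific tag (TAG_RES) in the result-collection
 * loop, in the spirit of the change above. Not the project's code; tags and
 * protocol are invented for this example. */
#include <mpi.h>
#include <cstdio>

static const int TAG_PULL = 1;   /* worker asks for more work       */
static const int TAG_RES  = 2;   /* worker reports a result length  */

int main( int argc, char** argv ) {
    MPI_Init( &argc, &argv );
    int rank, np;
    MPI_Comm_rank( MPI_COMM_WORLD, &rank );
    MPI_Comm_size( MPI_COMM_WORLD, &np );

    if( rank == 0 ) {                       /* "foreman" side */
        int running = np - 1;               /* all the workers are running */
        while( running > 0 ) {
            unsigned int len;
            MPI_Status status;
            /* Match TAG_RES explicitly: a pending TAG_PULL must not be
               mistaken for a result, which would desynchronize the loop. */
            MPI_Recv( &len, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, TAG_RES,
                      MPI_COMM_WORLD, &status );
            printf( "Result of length %u from rank %d\n", len, status.MPI_SOURCE );
            running--;
        }
        /* Drain the pull requests that were deliberately skipped above,
           so every send is matched before MPI_Finalize. */
        for( int i = 0; i < np - 1; i++ ) {
            unsigned int dummy;
            MPI_Recv( &dummy, 1, MPI_UNSIGNED, MPI_ANY_SOURCE, TAG_PULL,
                      MPI_COMM_WORLD, MPI_STATUS_IGNORE );
        }
    } else {                                /* worker side */
        unsigned int dummy = 0;
        MPI_Request pull_req;
        /* A pull request still in flight when the result arrives; with
           MPI_ANY_TAG the foreman could pick this up as a "result". */
        MPI_Isend( &dummy, 1, MPI_UNSIGNED, 0, TAG_PULL, MPI_COMM_WORLD, &pull_req );
        unsigned int len = 42;              /* pretend result size */
        MPI_Send( &len, 1, MPI_UNSIGNED, 0, TAG_RES, MPI_COMM_WORLD );
        MPI_Wait( &pull_req, MPI_STATUS_IGNORE );
    }

    MPI_Finalize();
    return 0;
}

Receiving pull requests in a separate, explicitly tagged phase keeps the bookkeeping of the running counter tied to actual results only.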