Remove Cajita Halo stored comm
streeve committed Aug 1, 2023
1 parent 438779a commit 5f29ca5
Showing 1 changed file with 17 additions and 31 deletions.
48 changes: 17 additions & 31 deletions cajita/src/Cajita_Halo.hpp
@@ -220,10 +220,6 @@ class Halo
         // Spatial dimension.
         const std::size_t num_space_dim = Pattern::num_space_dim;
 
-        // Get the MPI communicator. All arrays must have the same
-        // communicator.
-        getComm( arrays... );
-
         // Get the local grid.
         auto local_grid = getLocalGrid( arrays... );
@@ -280,9 +276,6 @@ class Halo
         }
     }
 
-    // Destructor.
-    ~Halo() { MPI_Comm_free( &_comm ); }
-
     /*!
       \brief Gather data into our ghosts from their owners.
@@ -304,6 +297,9 @@ class Halo
        if ( 0 == num_n )
            return;
 
+        // Get the MPI communicator.
+        auto comm = getComm( arrays... );
+
        // Allocate requests.
        std::vector<MPI_Request> requests( 2 * num_n, MPI_REQUEST_NULL );
@@ -319,8 +315,8 @@ class Halo
            {
                MPI_Irecv( _ghosted_buffers[n].data(),
                           _ghosted_buffers[n].size(), MPI_BYTE,
-                          _neighbor_ranks[n], mpi_tag + _receive_tags[n],
-                          _comm, &requests[n] );
+                          _neighbor_ranks[n], mpi_tag + _receive_tags[n], comm,
+                          &requests[n] );
            }
        }

@@ -337,7 +333,7 @@ class Halo
                // Post a send.
                MPI_Isend( _owned_buffers[n].data(), _owned_buffers[n].size(),
                           MPI_BYTE, _neighbor_ranks[n],
-                          mpi_tag + _send_tags[n], _comm,
+                          mpi_tag + _send_tags[n], comm,
                           &requests[num_n + n] );
            }
        }
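
For context, the gather hunks above follow the standard non-blocking exchange pattern: post every receive, then every send, then wait on all requests, with the communicator now passed per call instead of read from a member. A minimal standalone sketch of that shape (buffer, rank, and tag names here are illustrative stand-ins, not Cabana's members):

#include <mpi.h>
#include <vector>

// Exchange one byte buffer with each neighbor over the given communicator.
void exchange( MPI_Comm comm, const std::vector<int>& neighbor_ranks,
               std::vector<std::vector<char>>& recv_buffers,
               std::vector<std::vector<char>>& send_buffers, const int tag )
{
    const int num_n = static_cast<int>( neighbor_ranks.size() );
    std::vector<MPI_Request> requests( 2 * num_n, MPI_REQUEST_NULL );

    // Post receives first so matching sends can complete promptly.
    for ( int n = 0; n < num_n; ++n )
        if ( !recv_buffers[n].empty() )
            MPI_Irecv( recv_buffers[n].data(),
                       static_cast<int>( recv_buffers[n].size() ), MPI_BYTE,
                       neighbor_ranks[n], tag, comm, &requests[n] );

    // Then post the sends.
    for ( int n = 0; n < num_n; ++n )
        if ( !send_buffers[n].empty() )
            MPI_Isend( send_buffers[n].data(),
                       static_cast<int>( send_buffers[n].size() ), MPI_BYTE,
                       neighbor_ranks[n], tag, comm, &requests[num_n + n] );

    // Wait on receives and sends together.
    MPI_Waitall( 2 * num_n, requests.data(), MPI_STATUSES_IGNORE );
}

The real code disambiguates messages with per-neighbor tag offsets (mpi_tag + _receive_tags[n] / _send_tags[n]); a single tag suffices in this sketch only because each neighbor pair exchanges at most one message in each direction.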
@@ -390,6 +386,9 @@ class Halo
        if ( 0 == num_n )
            return;
 
+        // Get the MPI communicator.
+        auto comm = getComm( arrays... );
+
        // Requests.
        std::vector<MPI_Request> requests( 2 * num_n, MPI_REQUEST_NULL );
@@ -405,7 +404,7 @@ class Halo
            {
                MPI_Irecv( _owned_buffers[n].data(), _owned_buffers[n].size(),
                           MPI_BYTE, _neighbor_ranks[n],
-                          mpi_tag + _receive_tags[n], _comm, &requests[n] );
+                          mpi_tag + _receive_tags[n], comm, &requests[n] );
            }
        }
@@ -422,7 +421,7 @@ class Halo
                // Post a send.
                MPI_Isend( _ghosted_buffers[n].data(),
                           _ghosted_buffers[n].size(), MPI_BYTE,
-                          _neighbor_ranks[n], mpi_tag + _send_tags[n], _comm,
+                          _neighbor_ranks[n], mpi_tag + _send_tags[n], comm,
                           &requests[num_n + n] );
            }
        }
@@ -457,29 +456,19 @@ class Halo
    }
 
  public:
-    //! Get the communicator and check to make sure all are the same.
+    //! Return the communicator.
    template <class Array_t>
-    void getComm( const Array_t& array )
+    MPI_Comm getComm( const Array_t& array ) const
    {
-        // Duplicate the communicator so we have our own communication space.
-        MPI_Comm_dup( array.layout()->localGrid()->globalGrid().comm(),
-                      &_comm );
+        return array.layout()->localGrid()->globalGrid().comm();
    }
 
-    //! Get the communicator and check to make sure all are the same.
+    //! Return the communicator.
    template <class Array_t, class... ArrayTypes>
-    void getComm( const Array_t& array, const ArrayTypes&... arrays )
+    MPI_Comm getComm( const Array_t& array, const ArrayTypes&... arrays ) const
    {
-        // Recurse.
-        getComm( arrays... );
 
-        // Check that the communicator of this array is the same as the halo
-        // comm.
-        int result;
-        MPI_Comm_compare( array.layout()->localGrid()->globalGrid().comm(),
-                          _comm, &result );
-        if ( result != MPI_CONGRUENT )
-            throw std::runtime_error( "Arrays have different communicators" );
+        return getComm( arrays... );
    }
 
    //! Get the local grid from the arrays. Check that the grids have the same
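
The rewritten getComm above is a standard recursive variadic peel: the single-argument overload is the base case, and the variadic overload discards its first argument and forwards the rest, so the call bottoms out on the last array's communicator (the congruence check the old code performed with MPI_Comm_compare is simply dropped). A compilable sketch of the same shape, using a hypothetical Array stand-in rather than Cabana's array type:

#include <mpi.h>

struct Array
{
    MPI_Comm comm; // hypothetical stand-in for layout()->...->comm()
};

// Base case: one array left, return its communicator.
MPI_Comm getComm( const Array& array ) { return array.comm; }

// Recursive case: drop the first array and recurse on the rest.
template <class... Arrays>
MPI_Comm getComm( const Array& /*array*/, const Arrays&... arrays )
{
    return getComm( arrays... );
}

int main( int argc, char** argv )
{
    MPI_Init( &argc, &argv );
    Array a{ MPI_COMM_WORLD }, b{ MPI_COMM_WORLD };
    MPI_Comm comm = getComm( a, b ); // resolves to the base case on b
    MPI_Barrier( comm );
    MPI_Finalize();
    return 0;
}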
@@ -911,9 +900,6 @@ class Halo
    }
 
  private:
-    // MPI communicator.
-    MPI_Comm _comm;
-
    // The ranks we will send/receive from.
    std::vector<int> _neighbor_ranks;
