Fixup device type #58

Merged 2 commits on May 24, 2024
examples/dam_break.cpp (15 changes: 8 additions & 7 deletions)
@@ -66,7 +66,7 @@ struct ParticleInitFunc
 //---------------------------------------------------------------------------//
 void damBreak( const double cell_size, const int ppc, const int halo_size,
                const double delta_t, const double t_final, const int write_freq,
-               const std::string& device )
+               const std::string& exec_space )
 {
     // The dam break domain is in a box on [0,1] in each dimension.
     Kokkos::Array<double, 6> global_box = { 0.0, 0.0, 0.0, 1.0, 1.0, 1.0 };
@@ -109,7 +109,7 @@ void damBreak( const double cell_size, const int ppc, const int halo_size,

     // Solve the problem.
     auto solver = ExaMPM::createSolver(
-        device, MPI_COMM_WORLD, global_box, global_num_cell, periodic,
+        exec_space, MPI_COMM_WORLD, global_box, global_num_cell, periodic,
         partitioner, halo_size, ParticleInitFunc( cell_size, ppc, density ),
         ppc, bulk_modulus, density, gamma, kappa, delta_t, gravity, bc );
     solver->solve( t_final, write_freq );
@@ -126,7 +126,7 @@ int main( int argc, char* argv[] )
     if ( argc < 8 )
     {
         std::cerr << "Usage: ./DamBreak cell_size parts_per_cell_size "
-                     "halo_cells dt t_end write_freq device\n";
+                     "halo_cells dt t_end write_freq exec_space\n";
         std::cerr << "\nwhere cell_size edge length of a computational "
                      "cell (domain is unit cube)\n";
         std::cerr
@@ -136,7 +136,7 @@ int main( int argc, char* argv[] )
         std::cerr << " t_end simulation end time\n";
         std::cerr
             << " write_freq number of steps between output files\n";
-        std::cerr << " device compute device: serial, openmp, "
+        std::cerr << " exec_space execute with: serial, openmp, "
                      "cuda, hip\n";
         std::cerr << "\nfor example: ./DamBreak 0.05 2 0 0.001 1.0 10 serial\n";
         Kokkos::finalize();
@@ -162,11 +162,12 @@ int main( int argc, char* argv[] )
     // write frequency
     int write_freq = std::atoi( argv[6] );

-    // device type
-    std::string device( argv[7] );
+    // execution space
+    std::string exec_space( argv[7] );

     // run the problem.
-    damBreak( cell_size, ppc, halo_size, delta_t, t_final, write_freq, device );
+    damBreak( cell_size, ppc, halo_size, delta_t, t_final, write_freq,
+              exec_space );

     Kokkos::finalize();

examples/free_fall.cpp (15 changes: 8 additions & 7 deletions)
@@ -65,7 +65,7 @@ struct ParticleInitFunc
 //---------------------------------------------------------------------------//
 void freeFall( const double cell_size, const int ppc, const int halo_size,
                const double delta_t, const double t_final, const int write_freq,
-               const std::string& device )
+               const std::string& exec_space )
 {
     // The free fall domain is in a box on [-0.5,0.5] in each dimension.
     Kokkos::Array<double, 6> global_box = { -0.5, -0.5, -0.5, 0.5, 0.5, 0.5 };
@@ -106,7 +106,7 @@ void freeFall( const double cell_size, const int ppc, const int halo_size,

     // Solve the problem.
     auto solver = ExaMPM::createSolver(
-        device, MPI_COMM_WORLD, global_box, global_num_cell, periodic,
+        exec_space, MPI_COMM_WORLD, global_box, global_num_cell, periodic,
         partitioner, halo_size, ParticleInitFunc( cell_size, ppc, density ),
         ppc, bulk_modulus, density, gamma, kappa, delta_t, gravity, bc );
     solver->solve( t_final, write_freq );
@@ -123,7 +123,7 @@ int main( int argc, char* argv[] )
     if ( argc < 8 )
    {
         std::cerr << "Usage: ./FreeFall cell_size parts_per_cell_size "
-                     "halo_cells dt t_end write_freq device\n";
+                     "halo_cells dt t_end write_freq exec_space\n";
         std::cerr << "\nwhere cell_size edge length of a computational "
                      "cell (domain is unit cube)\n";
         std::cerr
@@ -133,7 +133,7 @@ int main( int argc, char* argv[] )
         std::cerr << " t_end simulation end time\n";
         std::cerr
             << " write_freq number of steps between output files\n";
-        std::cerr << " device compute device: serial, openmp, "
+        std::cerr << " exec_space execute with: serial, openmp, "
                      "cuda, hip\n";
         std::cerr << "\nfor example: ./FreeFall 0.05 2 0 0.001 1.0 10 serial\n";
         Kokkos::finalize();
@@ -159,11 +159,12 @@ int main( int argc, char* argv[] )
     // write frequency
     int write_freq = std::atoi( argv[6] );

-    // device type
-    std::string device( argv[7] );
+    // execution space
+    std::string exec_space( argv[7] );

     // run the problem.
-    freeFall( cell_size, ppc, halo_size, delta_t, t_final, write_freq, device );
+    freeFall( cell_size, ppc, halo_size, delta_t, t_final, write_freq,
+              exec_space );

     Kokkos::finalize();

src/ExaMPM_ParticleInit.hpp (8 changes: 4 additions & 4 deletions)
@@ -107,14 +107,14 @@ void initializeParticles( const ExecSpace& exec_space,
                           const InitFunctor& create_functor,
                           ParticleList& particles )
 {
-    // Device type.
-    using device_type = typename ParticleList::device_type;
+    // Kokkos memory space.
+    using memory_space = typename ParticleList::memory_space;

     // Particle type.
     using particle_type = typename ParticleList::tuple_type;

     // Create a local mesh.
-    auto local_mesh = Cabana::Grid::createLocalMesh<device_type>( local_grid );
+    auto local_mesh = Cabana::Grid::createLocalMesh<memory_space>( local_grid );

     // Get the local set of owned cell indices.
     auto owned_cells = local_grid.indexSpace(
@@ -128,7 +128,7 @@ void initializeParticles( const ExecSpace& exec_space,
     particles.resize( num_particles );

     // Creation status.
-    auto particle_created = Kokkos::View<bool*, device_type>(
+    auto particle_created = Kokkos::View<bool*, memory_space>(
         Kokkos::ViewAllocateWithoutInitializing( "particle_created" ),
         num_particles );

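Side note on the hunks above: a Kokkos device type bundles an execution space with a memory space, and allocation only needs the memory-space half, so the particle list's memory_space alias is enough for both the local mesh and the creation-status view. The following standalone sketch illustrates that distinction; it assumes only a Kokkos installation and is not ExaMPM code.

// Minimal sketch: build a view from a memory space alone, mirroring the
// updated initializeParticles(). Not part of this PR.
#include <Kokkos_Core.hpp>

int main( int argc, char* argv[] )
{
    Kokkos::initialize( argc, argv );
    {
        // A device type pairs an execution space with a memory space.
        using device_type = Kokkos::DefaultExecutionSpace::device_type;

        // Allocation only needs the memory-space half of that pair.
        using memory_space = device_type::memory_space;

        // Same construction as the creation-status view in the diff above.
        auto particle_created = Kokkos::View<bool*, memory_space>(
            Kokkos::ViewAllocateWithoutInitializing( "particle_created" ),
            100 );

        // The allocation above skips initialization, so set a known value.
        Kokkos::deep_copy( particle_created, false );
    }
    Kokkos::finalize();
    return 0;
}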
src/ExaMPM_Solver.hpp (23 changes: 13 additions & 10 deletions)
@@ -144,7 +144,7 @@ class Solver : public SolverBase
 // Creation method.
 template <class InitFunc>
 std::shared_ptr<SolverBase>
-createSolver( const std::string& device, MPI_Comm comm,
+createSolver( const std::string& exec_space, MPI_Comm comm,
              const Kokkos::Array<double, 6>& global_bounding_box,
              const std::array<int, 3>& global_num_cell,
              const std::array<bool, 3>& periodic,
@@ -155,8 +155,9 @@ createSolver( const std::string& device, MPI_Comm comm,
              const double delta_t, const double gravity,
              const BoundaryCondition& bc )
 {
-    if ( 0 == device.compare( "serial" ) || 0 == device.compare( "Serial" ) ||
-         0 == device.compare( "SERIAL" ) )
+    if ( 0 == exec_space.compare( "serial" ) ||
+         0 == exec_space.compare( "Serial" ) ||
+         0 == exec_space.compare( "SERIAL" ) )
     {
 #ifdef KOKKOS_ENABLE_SERIAL
         return std::make_shared<
@@ -168,9 +169,9 @@ createSolver( const std::string& device, MPI_Comm comm,
         throw std::runtime_error( "Serial Backend Not Enabled" );
 #endif
     }
-    else if ( 0 == device.compare( "openmp" ) ||
-              0 == device.compare( "OpenMP" ) ||
-              0 == device.compare( "OPENMP" ) )
+    else if ( 0 == exec_space.compare( "openmp" ) ||
+              0 == exec_space.compare( "OpenMP" ) ||
+              0 == exec_space.compare( "OPENMP" ) )
     {
 #ifdef KOKKOS_ENABLE_OPENMP
         return std::make_shared<
@@ -182,8 +183,9 @@ createSolver( const std::string& device, MPI_Comm comm,
         throw std::runtime_error( "OpenMP Backend Not Enabled" );
 #endif
     }
-    else if ( 0 == device.compare( "cuda" ) || 0 == device.compare( "Cuda" ) ||
-              0 == device.compare( "CUDA" ) )
+    else if ( 0 == exec_space.compare( "cuda" ) ||
+              0 == exec_space.compare( "Cuda" ) ||
+              0 == exec_space.compare( "CUDA" ) )
     {
 #ifdef KOKKOS_ENABLE_CUDA
         return std::make_shared<
@@ -195,8 +197,9 @@ createSolver( const std::string& device, MPI_Comm comm,
         throw std::runtime_error( "CUDA Backend Not Enabled" );
 #endif
     }
-    else if ( 0 == device.compare( "hip" ) || 0 == device.compare( "Hip" ) ||
-              0 == device.compare( "HIP" ) )
+    else if ( 0 == exec_space.compare( "hip" ) ||
+              0 == exec_space.compare( "Hip" ) ||
+              0 == exec_space.compare( "HIP" ) )
     {
 #ifdef KOKKOS_ENABLE_HIP
         return std::make_shared<ExaMPM::Solver<Kokkos::Experimental::HIPSpace,
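For context on the pattern in ExaMPM_Solver.hpp: the string comparison is repeated for three capitalizations per backend, and each branch throws a std::runtime_error when the matching KOKKOS_ENABLE_* macro is not defined. The sketch below restates that dispatch in isolation, using a hypothetical lower-casing helper in place of the explicit compares; toLower() and selectBackend() are illustrative names, not part of the PR, and this is not a drop-in replacement for createSolver().

// Standalone sketch of the backend-selection idea above. The KOKKOS_ENABLE_*
// macros come from the Kokkos build configuration pulled in by <Kokkos_Core.hpp>.
#include <Kokkos_Core.hpp>

#include <algorithm>
#include <cctype>
#include <stdexcept>
#include <string>

// Normalize the user-supplied backend name so "Serial", "SERIAL", and
// "serial" all match a single lower-case spelling.
std::string toLower( std::string s )
{
    std::transform( s.begin(), s.end(), s.begin(),
                    []( unsigned char c ) { return std::tolower( c ); } );
    return s;
}

// Return a canonical backend name, or throw if the backend was not enabled
// at configure time, mirroring the error handling in createSolver().
std::string selectBackend( const std::string& exec_space )
{
    const std::string name = toLower( exec_space );
    if ( name == "serial" )
    {
#ifdef KOKKOS_ENABLE_SERIAL
        return "serial";
#else
        throw std::runtime_error( "Serial Backend Not Enabled" );
#endif
    }
    else if ( name == "openmp" )
    {
#ifdef KOKKOS_ENABLE_OPENMP
        return "openmp";
#else
        throw std::runtime_error( "OpenMP Backend Not Enabled" );
#endif
    }
    else if ( name == "cuda" )
    {
#ifdef KOKKOS_ENABLE_CUDA
        return "cuda";
#else
        throw std::runtime_error( "CUDA Backend Not Enabled" );
#endif
    }
    else if ( name == "hip" )
    {
#ifdef KOKKOS_ENABLE_HIP
        return "hip";
#else
        throw std::runtime_error( "HIP Backend Not Enabled" );
#endif
    }
    throw std::runtime_error( "Unknown execution space: " + exec_space );
}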