diff --git a/Makefile b/Makefile index c34823ab5a..68f031d6f0 100644 --- a/Makefile +++ b/Makefile @@ -759,6 +759,16 @@ endif LIBS += $(NCLIB) endif +ifneq "$(SCOTCH)" "" + SCOTCH_FCINCLUDES += -I$(SCOTCH)/include + SCOTCH_LIBS += -L$(SCOTCH)/lib64 -lptscotch -lscotch -lptscotcherr -lm + SCOTCH_FFLAGS = -DMPAS_SCOTCH + + FCINCLUDES += $(SCOTCH_FCINCLUDES) + CPPINCLUDES += $(SCOTCH_FCINCLUDES) + LIBS += $(SCOTCH_LIBS) + override CPPFLAGS += $(SCOTCH_FFLAGS) +endif ifneq "$(PNETCDF)" "" ifneq ($(wildcard $(PNETCDF)/lib/libpnetcdf.*), ) @@ -1415,6 +1425,60 @@ musica_fortran_test: $(eval MUSICA_FORTRAN_VERSION := $(shell pkg-config --modversion musica-fortran)) $(if $(findstring 1,$(MUSICA_FORTRAN_TEST)), $(info Built a simple test program with MUSICA-Fortran version $(MUSICA_FORTRAN_VERSION)), ) +scotch_fortran_test: + @# + @# Create C and Fortran test programs and try to build against the PT-SCOTCH library + @# + $(info Checking for a working Scotch library...) + $(eval SCOTCH_C_TEST := $(shell $\ + printf "#include \n\ + &#include \"mpi.h\"\n\ + &#include \"ptscotch.h\"\n\ + &int main(){\n\ + & int err;\n\ + & SCOTCH_Dgraph *dgraph;\n\ + & err = SCOTCH_dgraphInit(dgraph, MPI_COMM_WORLD);\n\ + & SCOTCH_dgraphExit(dgraph);\n\ + & return err;\n\ + &}\n" | sed 's/&/ /' > ptscotch_c_test.c; $\ + $\ + $(CC) $(CPPINCLUDES) $(CFLAGS) $(LDFLAGS) ptscotch_c_test.c -o ptscotch_c_test.x $(SCOTCH_LIBS) > ptscotch_c_test.log 2>&1; $\ + scotch_c_status=$$?; $\ + if [ $$scotch_c_status -eq 0 ]; then $\ + printf "1"; $\ + rm -f ptscotch_c_test.c ptscotch_c_test.x ptscotch_c_test.log; $\ + else $\ + printf "0"; $\ + fi $\ + )) + $(if $(findstring 0,$(SCOTCH_C_TEST)), $(error Could not build a simple C program with Scotch. $\ + Test program ptscotch_c_test.c and output ptscotch_c_test.log have been left $\ + in the top-level MPAS directory for further debugging )) + $(if $(findstring 1,$(SCOTCH_C_TEST)), $(info Built a simple C program with Scotch )) + $(eval SCOTCH_FORTRAN_TEST := $(shell $\ + printf "program test_scotch_fortran\n$\ + & include \"ptscotchf.h\"\n$\ + & doubleprecision :: scotchgraph (scotch_graphdim)\n$\ + & integer :: ierr\n$\ + & ierr = 0\n$\ + & call scotchfgraphinit(scotchgraph (1), ierr)\n$\ + & call scotchfgraphexit(scotchgraph(1))\n$\ + end program test_scotch_fortran\n" | sed 's/&/ /' > ptscotch_f_test.f90; $\ + $\ + $(FC) $(SCOTCH_FCINCLUDES) $(SCOTCH_FFLAGS) ptscotch_f_test.f90 -o ptscotch_f_test.x $(SCOTCH_LIBS) > ptscotch_f_test.log 2>&1; $\ + scotch_fortran_status=$$?; $\ + if [ $$scotch_fortran_status -eq 0 ]; then $\ + printf "1"; $\ + rm -f ptscotch_f_test.f90 ptscotch_f_test.x ptscotch_f_test.log; $\ + else $\ + printf "0"; $\ + fi $\ + )) + $(if $(findstring 0,$(SCOTCH_FORTRAN_TEST)), $(error Could not build a simple Fortran program with Scotch. $\ + Test program ptscotch_f_test.f90 and output ptscotch_f_test.log have been left $\ + in the top-level MPAS directory for further debugging )) + $(if $(findstring 1,$(SCOTCH_FORTRAN_TEST)), $(info Built a simple Fortran program with Scotch )) + pnetcdf_test: @# @# Create test C programs that look for PNetCDF header file and some symbols in it @@ -1471,6 +1535,13 @@ else MUSICA_MESSAGE = "MPAS was not linked with the MUSICA-Fortran library." endif +ifneq "$(SCOTCH_FFLAGS)" "" +MAIN_DEPS += scotch_fortran_test +SCOTCH_MESSAGE = "MPAS has been linked with the Scotch graph partitioning library." +else +SCOTCH_MESSAGE = "MPAS was NOT linked with the Scotch graph partitioning library." 
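+# A sketch of how the Scotch option is expected to be enabled (the install path below is
+# hypothetical; the tree under $(SCOTCH) must provide the include/ and lib64/ layout assumed above):
+#   make gfortran CORE=atmosphere SCOTCH=/usr/local/ptscotch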
+endif + mpas_main: $(MAIN_DEPS) cd src; $(MAKE) FC="$(FC)" \ CC="$(CC)" \ @@ -1508,6 +1579,7 @@ mpas_main: $(MAIN_DEPS) @echo $(OPENMP_OFFLOAD_MESSAGE) @echo $(OPENACC_MESSAGE) @echo $(MUSICA_MESSAGE) + @echo $(SCOTCH_MESSAGE) @echo $(SHAREDLIB_MESSAGE) ifeq "$(AUTOCLEAN)" "true" @echo $(AUTOCLEAN_MESSAGE) diff --git a/src/framework/Makefile b/src/framework/Makefile index 2d8e7dc92b..4b3f1cbc5c 100644 --- a/src/framework/Makefile +++ b/src/framework/Makefile @@ -36,6 +36,8 @@ OBJS = mpas_kind_types.o \ mpas_log.o \ mpas_halo.o \ mpas_string_utils.o \ + mpas_ptscotch_interface.o \ + ptscotch_interface.o \ mpas_stream_inquiry.o \ stream_inquiry.o @@ -85,7 +87,7 @@ mpas_timekeeping.o: mpas_string_utils.o mpas_kind_types.o mpas_derived_types.o m mpas_timer.o: mpas_kind_types.o mpas_dmpar.o mpas_threading.o mpas_log.o -mpas_block_decomp.o: mpas_derived_types.o mpas_hash.o mpas_io_units.o mpas_dmpar.o +mpas_block_decomp.o: mpas_derived_types.o mpas_hash.o mpas_io_units.o mpas_dmpar.o mpas_ptscotch_interface.o mpas_block_creator.o: mpas_dmpar.o mpas_hash.o mpas_sort.o mpas_io_units.o mpas_block_decomp.o mpas_stream_manager.o mpas_decomp.o mpas_abort.o $(DEPS) @@ -112,6 +114,8 @@ xml_stream_parser.o: xml_stream_parser.c mpas_halo.o: mpas_derived_types.o mpas_pool_routines.o mpas_log.o +mpas_ptscotch_interface.o : mpas_derived_types.o mpas_dmpar.o mpas_log.o ptscotch_interface.o + mpas_stream_inquiry.o : mpas_derived_types.o mpas_log.o mpas_c_interfacing.o clean: diff --git a/src/framework/mpas_block_decomp.F b/src/framework/mpas_block_decomp.F index 4f3d197d5d..6faf05ca54 100644 --- a/src/framework/mpas_block_decomp.F +++ b/src/framework/mpas_block_decomp.F @@ -25,6 +25,9 @@ module mpas_block_decomp use mpas_derived_types use mpas_io_units use mpas_log +#ifdef MPAS_SCOTCH + use mpas_ptscotch_interface +#endif type graph integer :: nVerticesTotal @@ -51,6 +54,8 @@ module mpas_block_decomp subroutine mpas_block_decomp_cells_for_proc(dminfo, partial_global_graph_info, local_cell_list, block_id, block_start, & block_count, numBlocks, explicitProcDecomp, blockFilePrefix, procFilePrefix)!{{{ + use mpas_timer, only : mpas_timer_start, mpas_timer_stop + implicit none type (dm_info), intent(inout) :: dminfo !< Input: domain information @@ -77,9 +82,12 @@ subroutine mpas_block_decomp_cells_for_proc(dminfo, partial_global_graph_info, l character (len=StrKIND) :: filename logical :: no_blocks + logical :: useScotch no_blocks = .false. 
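+      ! Time the full cell decomposition; the Scotch-specific work below is timed
+      ! separately under the 'scotch_total' timer.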
+ call mpas_timer_start('mpas_block_decomp_cells_for_proc') + if (numBlocks == 0) then dminfo % total_blocks = dminfo % nProcs else @@ -95,53 +103,86 @@ subroutine mpas_block_decomp_cells_for_proc(dminfo, partial_global_graph_info, l allocate(local_nvertices(dminfo % nprocs)) allocate(global_start(dminfo % nprocs)) allocate(global_list(partial_global_graph_info % nVerticesTotal)) + + if (dminfo % my_proc_id == IO_NODE) then + + if (dminfo % total_blocks < 10) then + write(filename,'(a,i1)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 100) then + write(filename,'(a,i2)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 1000) then + write(filename,'(a,i3)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 10000) then + write(filename,'(a,i4)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 100000) then + write(filename,'(a,i5)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 1000000) then + write(filename,'(a,i6)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 10000000) then + write(filename,'(a,i7)') trim(blockFilePrefix), dminfo % total_blocks + else if (dminfo % total_blocks < 100000000) then + write(filename,'(a,i8)') trim(blockFilePrefix), dminfo % total_blocks + end if - if (dminfo % my_proc_id == IO_NODE) then - - if (dminfo % total_blocks < 10) then - write(filename,'(a,i1)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 100) then - write(filename,'(a,i2)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 1000) then - write(filename,'(a,i3)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 10000) then - write(filename,'(a,i4)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 100000) then - write(filename,'(a,i5)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 1000000) then - write(filename,'(a,i6)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 10000000) then - write(filename,'(a,i7)') trim(blockFilePrefix), dminfo % total_blocks - else if (dminfo % total_blocks < 100000000) then - write(filename,'(a,i8)') trim(blockFilePrefix), dminfo % total_blocks - end if - - call mpas_new_unit(iunit) - open(unit=iunit, file=trim(filename), form='formatted', status='old', iostat=istatus) - - if (istatus /= 0) then + call mpas_new_unit(iunit) + open(unit=iunit, file=trim(filename), form='formatted', status='old', iostat=istatus) + + if (istatus /= 0) then +#ifdef MPAS_SCOTCH + useScotch = .true. +#else call mpas_log_write('Could not open block decomposition file for $i blocks.', MPAS_LOG_ERR, intArgs=(/dminfo % total_blocks/) ) call mpas_log_write('Filename: '//trim(filename), MPAS_LOG_CRIT) - end if +#endif + else + useScotch = .false. + end if + end if - local_nvertices(:) = 0 - do i=1,partial_global_graph_info % nVerticesTotal - read(unit=iunit, fmt=*, iostat=err) global_block_id +#ifdef MPAS_SCOTCH + call mpas_dmpar_bcast_logical(dminfo, useScotch) - if ( err .ne. 
0 ) then - call mpas_log_write('Decomoposition file: ' // trim(filename) // ' contains less than $i cells', & - MPAS_LOG_CRIT, intArgs=(/partial_global_graph_info % nVerticesTotal/) ) - end if - call mpas_get_owning_proc(dminfo, global_block_id, owning_proc) - local_nvertices(owning_proc+1) = local_nvertices(owning_proc+1) + 1 - end do + if (useScotch) then ! Using PT-Scotch across all MPI ranks + + ! Pre-emptively blocking this untested code path. + if (numBlocks /= 0) then + call mpas_log_write('Scotch partitioning not available when config_number_of_blocks != 0 ', MPAS_LOG_CRIT) + end if + + call mpas_log_write('No existing block decomposition file found, invoking Scotch.') + call mpas_block_decomp_scotch(dminfo, partial_global_graph_info, blockFilePrefix, filename) - read(unit=iunit, fmt=*, iostat=err) + if (dminfo % my_proc_id == IO_NODE) then + open(unit=iunit, file=trim(filename), form='formatted', status='old', iostat=istatus) + call mpas_log_write('After Scotch decomposition, attempting to read block decomposition file: '//trim(filename)) + if (istatus /= 0) then + call mpas_log_write('Could not open block decomposition file for $i blocks.', MPAS_LOG_ERR, intArgs=(/dminfo % total_blocks/) ) + call mpas_log_write('Filename: '//trim(filename), MPAS_LOG_CRIT) + end if + end if + end if +#endif + + if (dminfo % my_proc_id == IO_NODE) then + + local_nvertices(:) = 0 + do i=1,partial_global_graph_info % nVerticesTotal + read(unit=iunit, fmt=*, iostat=err) global_block_id + + if ( err .ne. 0 ) then + call mpas_log_write('Decomoposition file: ' // trim(filename) // ' contains less than $i cells', & + MPAS_LOG_CRIT, intArgs=(/partial_global_graph_info % nVerticesTotal/) ) + end if + call mpas_get_owning_proc(dminfo, global_block_id, owning_proc) + local_nvertices(owning_proc+1) = local_nvertices(owning_proc+1) + 1 + end do + + read(unit=iunit, fmt=*, iostat=err) - if ( err == 0 ) then - call mpas_log_write('Decomposition file: ' // trim(filename) // ' contains more than $i cells', & - MPAS_LOG_CRIT, intArgs=(/partial_global_graph_info % nVerticesTotal/) ) - end if + if ( err == 0 ) then + call mpas_log_write('Decomposition file: ' // trim(filename) // ' contains more than $i cells', & + MPAS_LOG_CRIT, intArgs=(/partial_global_graph_info % nVerticesTotal/) ) + end if global_start(1) = 1 do i=2,dminfo % nprocs @@ -285,8 +326,238 @@ subroutine mpas_block_decomp_cells_for_proc(dminfo, partial_global_graph_info, l end if end if + call mpas_log_write('mpas_block_decomp_cells_for_proc successful ') + + call mpas_timer_stop('mpas_block_decomp_cells_for_proc') + end subroutine mpas_block_decomp_cells_for_proc!}}} + + + +!*********************************************************************** +! +! routine mpas_block_decomp_scotch +! +!> \brief Use PT-Scotch to generate the graph partitioning +!> \author Abishek Gopal +!> \date 12/05/25 +!> \details +!> This routine invokes the PT-Scotch library to first construct a distributed graph from the +!> partial global graph information read by each processor, then partitions the graph into the +!> specified number of blocks, and after redistributing the graph, it finally gathers all the +!> local block ids (for each MPI rank) to the IO_NODE to write out to a partition file with the +!> specified prefix. +! 
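+!> Note (from the file-writing code below): when blockFilePrefix is empty, the partition file
+!> defaults to <nVerticesTotal>.graph.info.part.<nProcs>; otherwise it is named
+!> <blockFilePrefix><nProcs>. One owning block id is written per line, in the format that
+!> mpas_block_decomp_cells_for_proc expects when reading a decomposition file.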
+!-----------------------------------------------------------------------
+#ifdef MPAS_SCOTCH
+   subroutine mpas_block_decomp_scotch(dminfo, partial_global_graph_info, blockFilePrefix, blockFilename)!{{{
+
+      use mpas_timer, only : mpas_timer_start, mpas_timer_stop
+#ifdef MPAS_USE_MPI_F08
+      use mpi_f08, only : MPI_Comm, MPI_COMM_WORLD, MPI_INTEGER, MPI_Comm_dup, MPI_Comm_free, MPI_Gather, MPI_Gatherv
+#else
+      use mpi
+#endif
+
+      implicit none
+
+      type (dm_info), intent(inout) :: dminfo                  !< Input: domain information
+      type (graph), intent(in) :: partial_global_graph_info    !< Input: Global graph information
+      character (len=*), intent(in) :: blockFilePrefix         !< Input: File prefix for block decomposition
+      character (len=*), intent(out) :: blockFilename          !< Output: Block decomposition file name
+
+      integer, dimension(:), pointer :: global_start
+      integer, dimension(:), allocatable :: local_cell_list
+      integer, dimension(:), allocatable :: local_block_list
+
+      integer, dimension(:), allocatable :: global_block_id_arr, local_block_id_arr
+      integer :: i, global_block_id, local_block_id, iunit, ounit, istatus, ostatus, j, k
+      integer :: err, ierr
+      integer, dimension(:), pointer :: local_nvertices
+      integer :: num_local_vertices     !< Number of local vertices for this processor
+      character (len=StrKIND) :: msg
+
+      integer :: nLocEdgesGraph = 0, nLocVerticesGraph = 0, edgelocsiz = 0, npart = 1
+      character (len=StrKIND) :: partitionFilePrefix
+      integer, dimension(:), allocatable :: edgeloctab, vertloctab
+      doubleprecision :: stradat (scotch_stratdim)
+      doubleprecision :: scotchgraph (SCOTCH_GRAPHDIM)
+      doubleprecision :: scotchdgraph (SCOTCH_DGRAPHDIM)
+      doubleprecision :: scotchdgraph_redist (SCOTCH_DGRAPHDIM)
+      integer :: mpi_ierr
+#ifdef MPAS_USE_MPI_F08
+      type (MPI_Comm) :: localcomm
+#else
+      integer :: localcomm
+#endif
+
+      allocate(local_nvertices(dminfo % nprocs))
+      allocate(global_start(dminfo % nprocs))
+      allocate(global_block_id_arr(partial_global_graph_info % nVerticesTotal))
+      allocate(local_block_id_arr(partial_global_graph_info % nVertices))
+
+      call mpas_timer_start('scotch_total')
+
+      ! Count the number of edges (including to ghost cells) in the portion of graph
+      ! owned by the current rank. Each edge is counted twice, once for each vertex,
+      ! with the exception of edges to ghost vertices, which are counted only once.
+      do i=1,partial_global_graph_info % nVertices
+         do j=1,partial_global_graph_info % nAdjacent(i)
+            if (partial_global_graph_info % adjacencyList(j,i) == 0) cycle
+            nLocEdgesGraph = nLocEdgesGraph + 1
+            ! call mpas_log_write('i=$i j=$i adj= $i', intArgs=(/i,j,partial_global_graph_info % adjacencyList(j,i)/) )
+         end do
+      end do
+
+      ! Holds the adjacency array for every local vertex
+      allocate(edgeloctab(nLocEdgesGraph))
+      ! Array of start indices in edgeloctab for each local vertex
+      allocate(vertloctab(partial_global_graph_info % nVertices + 1))
+
+      ! Fill up edgeloctab and vertloctab
+      k = 1
+      do i=1,partial_global_graph_info % nVertices
+         vertloctab(i) = k
+         do j=1,partial_global_graph_info % nAdjacent(i)
+            if (partial_global_graph_info % adjacencyList(j,i) == 0) cycle
+            edgeloctab(k) = partial_global_graph_info % adjacencyList(j,i)
+            k = k + 1
+         end do
+      end do
+      vertloctab(partial_global_graph_info % nVertices+1) = nLocEdgesGraph + 1
+
+      ! Duplicate the communicator to be used by Scotch
+      call MPI_Comm_dup(dminfo % comm, localcomm, mpi_ierr)
+      if (mpi_ierr .ne. 0) then
+         call mpas_log_write('Cannot duplicate communicator')
+      endif
+      ! Initialize the Scotch graph data structure, and an extra one to hold the re-distributed graph
+#ifdef MPAS_USE_MPI_F08
+      call scotch_dgraphinit(scotchdgraph(1), localcomm % mpi_val)
+      call scotch_dgraphinit(scotchdgraph_redist(1), localcomm % mpi_val)
+#else
+      call scotch_dgraphinit(scotchdgraph(1), localcomm)
+      call scotch_dgraphinit(scotchdgraph_redist(1), localcomm)
+#endif
+
+      ! From Scotch documentation: edgelocsiz is lower-bounded by the minimum size
+      ! of the edge array required to encompass all used adjacency values; it is
+      ! therefore at least equal to the maximum of the vendloctab entries, over all
+      ! local vertices, minus baseval; it can be set to edgelocnbr if the edge array is compact.
+      edgelocsiz = maxval(vertloctab) - 1
+
+      nLocVerticesGraph = partial_global_graph_info % nVertices
+
+      call mpas_log_write('Before SCOTCH distributed graph build. nLocVertices =$i nLocEdges= $i', &
+                          intArgs=(/nLocVerticesGraph, nLocEdgesGraph/))
+
+      ! Build the distributed Scotch graph and save it in scotchdgraph
+      ! Note: Optional arguments veloloctab, vlblloctab, edgegsttab, and edloloctab are not needed here.
+      call scotch_dgraphbuild (scotchdgraph(1), &
+                               nLocVerticesGraph, &   ! num of local vertices on the calling process
+                               vertloctab (1), &      ! Array of start indices in edgeloctab for each local vertex
+                               nLocEdgesGraph, &      ! Number of local edges, including to ghost vertices
+                               edgelocsiz, &          ! Defined previously
+                               edgeloctab(1))         ! Holds the adjacency array for every local vertex
+
+      ! Only needed during development/debugging.
+      call scotch_dgraphcheck (scotchdgraph(1))
+
+      ! Initialize the strategy data structure
+      call scotch_stratinit (stradat (1))
+
+      call mpas_timer_start('scotch_graph_partitioning')
+      ! Partition the distributed graph and save the result in local_block_id_arr
+      npart = dminfo % nProcs
+      call scotch_dgraphpart (scotchdgraph(1), npart, stradat (1), local_block_id_arr(1))
+
+      call mpas_timer_stop('scotch_graph_partitioning')
+
+      ! After the partitioning above, each processor does not necessarily have information about all of the
+      ! vertices it owns. To obtain this information, Scotch provides a convenience function to redistribute the graph
+      ! to all processors, so that each processor has information about all of the vertices it owns.
+      call scotch_dgraphredist(scotchdgraph(1), &         ! Input: original distributed graph
+                               local_block_id_arr, &      ! Input: the partition array
+                               scotchdgraph_redist(1), &  ! Output: re-distributed graph
+                               num_local_vertices)        ! Output: number of local vertices
+
+      ! DO NOT REMOVE: This call is required if we want to read the local cell list directly after partitioning,
+      ! instead of reading it from the output partition file.
+      ! Extract the local cell list from the re-distributed graph.
+      ! allocate(local_cell_list(num_local_vertices))
+      ! call scotch_dgraphdata(scotchdgraph_redist(1), local_cell_list)
+      ! do i=1,num_local_vertices
+      !    call mpas_log_write('local_cell_list($i): $i',MPAS_LOG_ERR, intArgs=(/i,local_cell_list(i)/))
+      ! end do
+
+      allocate(local_block_list(num_local_vertices))
+
+      local_block_list(:) = dminfo % my_proc_id
+
+      ! Use the local_nvertices array to hold the original number of vertices in
+      ! the partial graph read by each processor. Might need to use a different array
+      ! to clear up potential confusion.
+      local_nvertices(dminfo % my_proc_id + 1) = partial_global_graph_info % nVertices
+
+      ! call mpas_log_write('local_nvertices($i): $i', MPAS_LOG_ERR, intArgs=(/i,num_local_vertices/))
+
+      ! Gather all the partial_global_graph_info % nVertices to IO_NODE.
+      ! num_local_vertices is the number of vertices that this processor owns, determined by the
+      ! Scotch partitioning, whereas partial_global_graph_info % nVertices is the number of vertices
+      ! resident in the partial graph read by this processor. The latter is the correct size of
+      ! local_block_id_arr.
+      call MPI_Gather( partial_global_graph_info % nVertices, 1, MPI_INTEGER, local_nvertices, &
+                       1, MPI_INTEGER, 0, localcomm, ierr)
+
+      ! Compute the displacements for gathering all the local_block_id_arr to global_block_id_arr
+      global_start(1) = 0
+      do i=2,dminfo % nprocs
+         global_start(i) = global_start(i-1) + local_nvertices(i-1)
+      end do
+
+      ! Gather all the local block ids to global_block_id_arr so IO_NODE can write out the partitioning data
+      call MPI_Gatherv( local_block_id_arr, partial_global_graph_info % nVertices, MPI_INTEGER, global_block_id_arr, &
+                        local_nvertices, global_start, MPI_INTEGER, 0, localcomm, ierr)
+      ! Write out the partitioning data to a file from IO_NODE
+      if (dminfo % my_proc_id == IO_NODE) then
+         partitionFilePrefix = trim(blockFilePrefix)
+         if (trim(partitionFilePrefix) == '') then
+            write(partitionFilePrefix,'(i0,a)') partial_global_graph_info%nVerticesTotal,'.graph.info.part.'
+         end if
+         write(blockFilename,'(a,i0)') trim(partitionFilePrefix), dminfo % nProcs
+
+         call mpas_log_write('Writing out Scotch graph partitioning data to '//trim(blockFilename))
+         call mpas_new_unit(ounit)
+         open(unit=ounit, file=trim(blockFilename), form='formatted', status='new', action="write", iostat=ostatus)
+         do i=1,partial_global_graph_info % nVerticesTotal
+            write(unit=ounit, fmt='(i0)', iostat=err) global_block_id_arr(i)
+         end do
+         close(unit=ounit)
+         call mpas_release_unit(ounit)
+      end if
+
+      ! Clean up
+      call scotch_dgraphexit (scotchdgraph (1))
+      call scotch_dgraphexit (scotchdgraph_redist (1))
+      call scotch_stratexit (stradat (1))
+
+      deallocate(edgeloctab)
+      deallocate(vertloctab)
+      deallocate(local_block_list)
+      deallocate(local_nvertices)
+      deallocate(global_start)
+      deallocate(global_block_id_arr)
+      deallocate(local_block_id_arr)
+
+      call MPI_Comm_free(localcomm, mpi_ierr)
+      call mpas_timer_stop('scotch_total')
+      call mpas_log_write('Scotch partition successful')
+
+   end subroutine mpas_block_decomp_scotch
+#endif
+
 !***********************************************************************
 !
 ! routine mpas_block_decomp_partitioned_edge_list
diff --git a/src/framework/mpas_ptscotch_interface.F b/src/framework/mpas_ptscotch_interface.F
new file mode 100644
index 0000000000..03097984ee
--- /dev/null
+++ b/src/framework/mpas_ptscotch_interface.F
@@ -0,0 +1,418 @@
+! Copyright (c) 2025 The University Corporation for Atmospheric Research (UCAR).
+!
+! Unless noted otherwise source code is licensed under the BSD license.
+! Additional copyright and license information can be found in the LICENSE file
+! distributed with this code, or at https://mpas-dev.github.io/license.html .
+!
+#ifdef MPAS_SCOTCH
+module mpas_ptscotch_interface
+   use iso_c_binding, only : c_int, c_double
+#include "ptscotchf.h"
+   public :: scotch_dgraphinit, scotch_dgraphbuild
+
+contains
+
+   !-----------------------------------------------------------------------
+   ! subroutine scotch_dgraphinit
+   !
+   !> \brief Initialize a SCOTCH distributed graph object
+   !> \author Abishek Gopal
+   !> \date 8 Dec 2025
+   !> \details
+   !> Initializes a SCOTCH_Dgraph structure using a Fortran MPI communicator.
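+   !> The Fortran integer handle of the communicator is converted to a C MPI_Comm
+   !> with MPI_Comm_f2c inside the underlying C wrapper.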
+ !> This subroutine wraps the C function scotchm_dgraphinit. + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to be initialized + !> comm - Fortran MPI communicator integer + ! + !----------------------------------------------------------------------- + subroutine scotch_dgraphinit(dgraph, comm) + use iso_c_binding, only : c_ptr, c_loc + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_CRIT + + implicit none + ! Arguments + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + integer, intent(in) :: comm + + ! Return value + integer :: ierr + + interface + function scotchfdgraphinit(dgraph_ptr, localcomm) bind(C, name='scotchm_dgraphinit') result(err) + use iso_c_binding, only : c_ptr, c_int + type(c_ptr), value :: dgraph_ptr + integer(c_int), value :: localcomm + integer(c_int) :: err + end function scotchfdgraphinit + end interface + + ierr = scotchfdgraphinit(c_loc(dgraph), comm) + + if (ierr /= 0) then + call mpas_log_write('Error initalizing distributed Scotch graph') + else + call mpas_log_write('Successfully initialized distributed Scotch graph') + end if + + end subroutine scotch_dgraphinit + + !----------------------------------------------------------------------- + ! subroutine scotch_dgraphbuild + ! + !> \brief Build a SCOTCH distributed graph from local vertex/edge arrays + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Constructs a SCOTCH_Dgraph from local vertex and edge connectivity data. + !> This subroutine wraps the C function scotchm_dgraphbuild + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to be built + !> nVertices - Number of local vertices + !> vertloctab - Array of size (nVertices+1) + !> giving the start index of edges for each local vertex + !> nLocEdgesGraph - Total number of local edges in the graph + !> edgelocsiz - Size of the adjncy array + !> adjncy - Array of size nLocEdgesGraph containing the + !> adjacency list for local vertices + ! + !----------------------------------------------------------------------- + subroutine scotch_dgraphbuild(dgraph, nVertices, vertloctab, nLocEdgesGraph, edgelocsiz, adjncy) + use iso_c_binding, only : c_ptr, c_loc + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_CRIT + + implicit none + + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + integer(SCOTCH_NUMSIZE), intent(in) :: nVertices + integer(SCOTCH_NUMSIZE), intent(in) :: vertloctab(nVertices+1) + integer(SCOTCH_NUMSIZE), intent(in) :: nLocEdgesGraph + integer(SCOTCH_NUMSIZE), intent(in) :: edgelocsiz + integer(SCOTCH_NUMSIZE), intent(in) :: adjncy(nLocEdgesGraph) + + ! 
Return value + integer :: ierr + + interface + function scotchfdgraphbuild(dgraph_ptr, nVertices, vertloctab, & + nLocEdgesGraph, edgelocsiz, adjncy) bind(C, name='scotchm_dgraphbuild') result(err) + use iso_c_binding, only : c_ptr, c_int + type(c_ptr), value :: dgraph_ptr + integer(c_int), value :: nVertices + integer(c_int) :: vertloctab(nVertices+1) + integer(c_int), value :: nLocEdgesGraph + integer(c_int), value :: edgelocsiz + integer(c_int) :: adjncy(nLocEdgesGraph) + integer(c_int) :: err + end function scotchfdgraphbuild + end interface + + ierr = 0 + + ierr = scotchfdgraphbuild(c_loc(dgraph), nVertices, vertloctab, & + nLocEdgesGraph, edgelocsiz, adjncy) + + if (ierr /= 0) then + call mpas_log_write('Error building distributed Scotch graph', MPAS_LOG_CRIT) + else + call mpas_log_write('Successfully built distributed Scotch graph') + end if + + end subroutine scotch_dgraphbuild + + !----------------------------------------------------------------------- + ! subroutine scotch_dgraphcheck + ! + !> \brief Perform consistency check on a SCOTCH distributed graph + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Validates the internal structure of a SCOTCH_Dgraph for consistency. + !> This subroutine wraps the C function scotchm_dgraphcheck. + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to be checked + ! + !----------------------------------------------------------------------- + subroutine scotch_dgraphcheck(dgraph) + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_CRIT + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + + ! Return value + integer :: ierr + + interface + function scotchfdgraphcheck(dgraph_ptr) bind(C, name='scotchm_dgraphcheck') result(err) + use iso_c_binding, only : c_int, c_ptr + type(c_ptr), value :: dgraph_ptr + integer(c_int) :: err + end function scotchfdgraphcheck + end interface + + ierr = scotchfdgraphcheck(c_loc(dgraph)) + + if (ierr /= 0) then + call mpas_log_write('Error during distributed Scotch graph check', MPAS_LOG_CRIT) + else + call mpas_log_write('Successfully checked distributed Scotch graph') + end if + + end subroutine scotch_dgraphcheck + + !----------------------------------------------------------------------- + ! subroutine scotch_dgraphexit + ! + !> \brief Finalize/cleanup a SCOTCH distributed graph object + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Deallocates internal structures associated with a SCOTCH_Dgraph. + !> This subroutine wraps the C function scotchm_dgraphexit. + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to be finalized + ! + !----------------------------------------------------------------------- + subroutine scotch_dgraphexit(dgraph) + use mpas_log, only : mpas_log_write + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + + interface + subroutine scotchfdgraphexit(dgraph_ptr) bind(C, name='scotchm_dgraphexit') + use iso_c_binding, only : c_int, c_ptr + type(c_ptr), value :: dgraph_ptr + end subroutine scotchfdgraphexit + end interface + + call scotchfdgraphexit(c_loc(dgraph)) + + end subroutine scotch_dgraphexit + + !----------------------------------------------------------------------- + ! subroutine scotch_stratinit + ! 
+ !> \brief Initialize a SCOTCH strategy object + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Initializes a SCOTCH_Strat structure and builds a default strategy + !> for distributed graph mapping. This function wraps the C function + !> scotchm_stratinit. + !> \arguments + !> stradat - SCOTCH_Strat structure to be initialized + ! + !----------------------------------------------------------------------- + subroutine scotch_stratinit(stradat) + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_CRIT + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: stradat (scotch_stratdim) + + ! Return value + integer :: ierr + + interface + function scotchfstratinit(strat_ptr) bind(C, name='scotchm_stratinit') result(err) + use iso_c_binding, only : c_int, c_ptr + type(c_ptr), value :: strat_ptr + integer(c_int) :: err + end function scotchfstratinit + end interface + + ierr = scotchfstratinit(c_loc(stradat)) + + if (ierr /= 0) then + call mpas_log_write('Error during Scotch strategy initialization', MPAS_LOG_CRIT) + else + call mpas_log_write('Successfully initialized Scotch strategy') + end if + + end subroutine scotch_stratinit + + !----------------------------------------------------------------------- + ! subroutine scotch_stratexit + ! + !> \brief Finalize/cleanup a SCOTCH strategy object + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Deallocates internal structures associated with a SCOTCH_Strat. + !> This subroutine wraps the C function scotchm_stratexit. + !> \arguments + !> stradat - SCOTCH_Strat structure to be finalized + ! + !----------------------------------------------------------------------- + subroutine scotch_stratexit(stradat) + use mpas_log, only : mpas_log_write + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: stradat (scotch_stratdim) + + interface + subroutine scotchfstratexit(strat_ptr) bind(C, name='scotchm_stratexit') + use iso_c_binding, only : c_ptr + type(c_ptr), value :: strat_ptr + end subroutine scotchfstratexit + end interface + + call scotchfstratexit(c_loc(stradat)) + + end subroutine scotch_stratexit + + !----------------------------------------------------------------------- + ! subroutine scotch_dgraphpart + ! + !> \brief Partition a SCOTCH distributed graph + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Partitions the distributed graph into num_part parts using the + !> provided SCOTCH strategy object. This subroutine wraps the C function + !> scotchm_dgraphpart. + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to be partitioned + !> num_part - Number of partitions + !> stradat - SCOTCH_Strat structure containing partitioning strategy + !> parttab - Output array of size equal to number of local vertices, + ! + !----------------------------------------------------------------------- + subroutine scotch_dgraphpart(dgraph, num_part, stradat, parttab) + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_CRIT + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + integer(SCOTCH_NUMSIZE), intent(in) :: num_part + doubleprecision, target, intent(in) :: stradat (scotch_stratdim) + integer(SCOTCH_NUMSIZE), intent(out) :: parttab(*) + + ! 
Return value + integer :: ierr + + interface + function scotchfdgraphpart(dgraph_ptr, num_part_loc, strat_ptr, parttab_loc ) bind(C, name='scotchm_dgraphpart') result(err) + use iso_c_binding, only : c_int, c_ptr + type(c_ptr), value :: dgraph_ptr + integer(c_int), value :: num_part_loc + type(c_ptr), value :: strat_ptr + integer(c_int) :: parttab_loc(*) + integer(c_int) :: err + end function scotchfdgraphpart + end interface + + ierr = scotchfdgraphpart(c_loc(dgraph), num_part, c_loc(stradat), parttab) + + if (ierr /= 0) then + call mpas_log_write('Error during Scotch graph partition', MPAS_LOG_CRIT) + else + call mpas_log_write('Successfully partitioned distributed Scotch graph') + end if + + end subroutine scotch_dgraphpart + + !----------------------------------------------------------------------- + ! subroutine scotch_dgraphredist + ! + !> \brief Redistribute a SCOTCH distributed graph according to partitions + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Redistributes the distributed graph structure based on a partition + !> table. This subroutine wraps the C function scotchm_dgraphredist. + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to be redistributed + !> parttab - Input array of size equal to number of local vertices, + !> containing partition assignments + !> dgraph_out - SCOTCH_Dgraph structure to hold redistributed graph + !> num_local_vertices - Number of local vertices in the redistributed graph + ! + !----------------------------------------------------------------------- + subroutine scotch_dgraphredist(dgraph, parttab, dgraph_out, num_local_vertices) + use mpas_log, only : mpas_log_write + use mpas_derived_types, only : MPAS_LOG_CRIT + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + integer(SCOTCH_NUMSIZE), intent(in) :: parttab(*) + doubleprecision, target, intent(inout) :: dgraph_out (SCOTCH_DGRAPHDIM) + integer(SCOTCH_NUMSIZE) :: num_local_vertices + + ! Return value + integer :: ierr + + interface + function scotchfdgraphredist(dgraph_ptr, parttab_loc, dgraph_out_ptr, vertlocnbr ) bind(C, name='scotchm_dgraphredist') result(err) + use iso_c_binding, only : c_int, c_ptr + type(c_ptr), value :: dgraph_ptr + integer(c_int) :: parttab_loc(*) + type(c_ptr), value :: dgraph_out_ptr + integer(c_int) :: vertlocnbr + integer(c_int) :: err + end function scotchfdgraphredist + end interface + + ierr = scotchfdgraphredist(c_loc(dgraph), parttab, c_loc(dgraph_out), num_local_vertices) + + if (ierr /= 0) then + call mpas_log_write('Error during Scotch graph redistribution', MPAS_LOG_CRIT) + else + call mpas_log_write('Successfully redistributed Scotch graph') + end if + + end subroutine scotch_dgraphredist + + !----------------------------------------------------------------------- + ! subroutine scotch_dgraphdata + ! + !> \brief Extract vertex labels from a SCOTCH distributed graph + !> \author Abishek Gopal + !> \date 8 Dec 2025 + !> \details + !> Extracts vertex labels or stored IDs for local vertices into the + !> output array. This subroutine wraps the C function scotchm_dgraphdata. + !> \arguments + !> dgraph - SCOTCH_Dgraph structure to extract from + !> local_cell_list - Output array to hold vertex labels for local vertices + ! 
+ !----------------------------------------------------------------------- + subroutine scotch_dgraphdata(dgraph, local_cell_list) + use mpas_log, only : mpas_log_write + use iso_c_binding, only : c_ptr, c_loc + + implicit none + + doubleprecision, target, intent(in) :: dgraph (SCOTCH_DGRAPHDIM) + integer(SCOTCH_NUMSIZE), intent(out) :: local_cell_list(*) + + interface + subroutine scotchfdgraphdata(dgraph_ptr, cell_list) bind(C, name='scotchm_dgraphdata') + use iso_c_binding, only : c_int, c_ptr + type(c_ptr), value :: dgraph_ptr + integer(c_int) :: cell_list(*) + end subroutine scotchfdgraphdata + end interface + + call scotchfdgraphdata(c_loc(dgraph), local_cell_list) + + end subroutine scotch_dgraphdata + +end module mpas_ptscotch_interface +#endif \ No newline at end of file diff --git a/src/framework/ptscotch_interface.c b/src/framework/ptscotch_interface.c new file mode 100644 index 0000000000..c12718abae --- /dev/null +++ b/src/framework/ptscotch_interface.c @@ -0,0 +1,285 @@ +/* + * Copyright (c) 2025, The University Corporation for Atmospheric Research (UCAR). + * + * Unless noted otherwise source code is licensed under the BSD license. + * Additional copyright and license information can be found in the LICENSE file + * distributed with this code, or at http://mpas-dev.github.com/license.html + */ +#ifdef MPAS_SCOTCH +#include +#include +#include +#include +#include +#include +#include +#include "ptscotch.h" + + +/******************************************************************************** + * + * scotchm_dgraphinit + * + * Initialize a SCOTCH distributed graph object using a Fortran MPI communicator. + * + * Parameters: + * ptr - pointer to a `SCOTCH_Dgraph` structure (as `void *`) + * localcomm - Fortran MPI communicator handle (`MPI_Fint`) passed as `int` + * + * Returns: + * integer error code returned by `SCOTCH_dgraphInit` (0 on success). + * + ********************************************************************************/ +int scotchm_dgraphinit(void *ptr, int localcomm) +{ + MPI_Comm comm; + MPI_Comm comm2; + int size, rank, err; + + comm = MPI_Comm_f2c((MPI_Fint)localcomm); + + SCOTCH_Dgraph *dgraph = (SCOTCH_Dgraph *)ptr; + + err = SCOTCH_dgraphInit(dgraph, comm); + + return err; +} + + +/******************************************************************************** + * + * scotchm_dgraphbuild + * + * Build a SCOTCH distributed graph from local vertex/edge arrays. + * + * Parameters: + * ptr - pointer to a `SCOTCH_Dgraph` structure (as `void *`) + * nVertices - number of local vertices + * vertloctab_1 - pointer to Fortran-style vertex index array (based) + * nLocEdgesGraph - number of local edges in the distributed graph + * edgelocsiz_1 - size of the local edge array + * adjncy - adjacency list array (edge destinations) + * + * Returns: + * integer error code returned by `SCOTCH_dgraphBuild` (0 on success). 
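+ * Note: the Fortran-side interface declares these arguments as integer(c_int), so this
+ * wrapper assumes SCOTCH_Num is a 32-bit integer (i.e. SCOTCH built without 64-bit indices).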
+ * + ********************************************************************************/ +int scotchm_dgraphbuild(void *ptr, + SCOTCH_Num nVertices, + SCOTCH_Num *vertloctab_1, + SCOTCH_Num nLocEdgesGraph, + SCOTCH_Num edgelocsiz_1, + SCOTCH_Num *adjncy) +{ + SCOTCH_Num baseval = 1; /* Fortran-style 1-based indexing */ + SCOTCH_Num vertlocnbr = nVertices; + SCOTCH_Num *veloloctab = NULL; /* vertex weights not used */ + SCOTCH_Num *vlblloctab = NULL; /* vertex labels not used */ + SCOTCH_Num edgelocnbr = nLocEdgesGraph; + SCOTCH_Num edgelocsiz = edgelocsiz_1; + SCOTCH_Num *edgegsttab = NULL; /* Optional array holding the local and ghost indices */ + SCOTCH_Num *edloloctab = NULL; /* Optional array of integer loads for each local edge */ + + SCOTCH_Num *vertloctab = (SCOTCH_Num *)vertloctab_1; + SCOTCH_Num *vendloctab = vertloctab_1 + 1; + SCOTCH_Num *edgeloctab = (SCOTCH_Num *)adjncy; + + int i, err; + + SCOTCH_Dgraph *dgraph = (SCOTCH_Dgraph *)ptr; + + err = SCOTCH_dgraphBuild(dgraph, + baseval, + vertlocnbr, + vertlocnbr, + vertloctab, + vendloctab, + veloloctab, + vlblloctab, + edgelocnbr, + edgelocsiz, + edgeloctab, + edgegsttab, + edloloctab); + + return err; +} + + +/******************************************************************************** + * + * scotchm_dgraphcheck + * + * Perform an internal consistency check of a SCOTCH distributed graph. + * + * Parameters: + * ptr - pointer to a `SCOTCH_Dgraph` structure (as `void *`) + * + * Returns: + * integer error code returned by `SCOTCH_dgraphCheck` (0 on success). + * + ********************************************************************************/ +int scotchm_dgraphcheck(void *ptr) +{ + return SCOTCH_dgraphCheck((SCOTCH_Dgraph *)ptr); +} + + +/******************************************************************************** + * + * scotchm_dgraphpart + * + * Partition the distributed graph into `num_part` parts using the provided + * SCOTCH strategy object. + * + * Parameters: + * ptr - pointer to a `SCOTCH_Dgraph` structure (as `void *`) + * num_part - number of partitions + * ptr_strat - pointer to a `SCOTCH_Strat` structure (as `void *`) + * parttab - output array receiving part numbers for local vertices + * + * Returns: + * integer error code returned by `SCOTCH_dgraphPart` (0 on success). + * + ********************************************************************************/ +int scotchm_dgraphpart(void *ptr, SCOTCH_Num num_part, void *ptr_strat, SCOTCH_Num *parttab) +{ + SCOTCH_Dgraph *dgraph = (SCOTCH_Dgraph *)ptr; + SCOTCH_Strat *strat = (SCOTCH_Strat *)ptr_strat; + + return SCOTCH_dgraphPart(dgraph, num_part, strat, parttab); +} + + +/******************************************************************************** + * + * scotchm_dgraphredist + * + * Redistribute a distributed SCOTCH graph given the partition table. + * + * Parameters: + * ptr - pointer to input `SCOTCH_Dgraph` structure (as `void *`) + * partloctab - partition table for local vertices + * ptr_out - pointer to output `SCOTCH_Dgraph` structure (as `void *`) + * vertlocnbr - pointer to return the number of local vertices in output + * + * Returns: + * integer error code returned by `SCOTCH_dgraphRedist` (0 on success). 
+ * + ********************************************************************************/ +int scotchm_dgraphredist(void *ptr, SCOTCH_Num *partloctab, void *ptr_out, SCOTCH_Num *vertlocnbr) +{ + SCOTCH_Dgraph *dgraph_in = (SCOTCH_Dgraph *)ptr; + SCOTCH_Dgraph *dgraph_out = (SCOTCH_Dgraph *)ptr_out; + SCOTCH_Num *permgsttab = NULL; /* Redistribution permutation array */ + SCOTCH_Num vertlocdlt = 0; /* Extra size of local vertex array */ + SCOTCH_Num edgelocdlt = 0; /* Extra size of local edge array */ + int err; + + err = SCOTCH_dgraphRedist(dgraph_in, partloctab, permgsttab, vertlocdlt, edgelocdlt, dgraph_out); + + // Call SCOTCH_dgraphSize to obtain the number of local vertices in the redistributed graph + SCOTCH_dgraphSize(dgraph_out, NULL, vertlocnbr, NULL, NULL); + + return err; +} + +/******************************************************************************** + * + * scotchm_dgraphdata + * + * Extract vertex labels (or stored IDs) for local vertices into `cell_list`. + * + * Parameters: + * ptr - pointer to a `SCOTCH_Dgraph` structure (as `void *`) + * cell_list - output array to receive vertex labels for local vertices + * + * Returns: + * integer error code (currently returns the local `err` variable; 0 on success). + * + ********************************************************************************/ +void scotchm_dgraphdata(void *ptr, SCOTCH_Num *cell_list) +{ + + int err; + + SCOTCH_Num vertlocnbr; + SCOTCH_Num *vlblloctab; /* vertex labels */ + + SCOTCH_Dgraph *dgraph = (SCOTCH_Dgraph *)ptr; + + SCOTCH_dgraphData(dgraph, NULL, NULL, &vertlocnbr, NULL, NULL, + NULL, NULL, NULL, &vlblloctab, + NULL, NULL, NULL, + NULL, NULL, NULL, NULL); + + // Copy vertex labels to output array + for (SCOTCH_Num i = 0; i < vertlocnbr; i++) { + cell_list[i] = vlblloctab[i]; + } + +} + + +/******************************************************************************** + * + * scotchm_dgraphexit + * + * Finalize/cleanup a `SCOTCH_Dgraph` object. + * + * Parameters: + * ptr - pointer to a `SCOTCH_Dgraph` structure (as `void *`) + * + * Returns: + * nothing (wraps `SCOTCH_dgraphExit`). + * + ********************************************************************************/ +void scotchm_dgraphexit(void *ptr) +{ + SCOTCH_dgraphExit((SCOTCH_Dgraph *)ptr); +} + + +/******************************************************************************** + * + * scotchm_stratinit + * + * Initialize a SCOTCH strategy object and build a default strategy for + * distributed graph mapping. + * + * Parameters: + * strat_ptr - pointer to a `SCOTCH_Strat` structure (as `void *`) + * + * Returns: + * integer (0 on success). + * + ********************************************************************************/ +int scotchm_stratinit(void *strat_ptr) +{ + SCOTCH_stratInit((SCOTCH_Strat *)strat_ptr); + + // This was required to avoid crashes when scaling up to large core counts + SCOTCH_stratDgraphMapBuild((SCOTCH_Strat *)strat_ptr, SCOTCH_STRATSCALABILITY, 1, 0, 0.05); + + return 0; +} + + +/* ******************************************************************************** + * + * scotchm_stratexit + * + * Finalize/cleanup a `SCOTCH_Strat` strategy object. + * + * Parameters: + * strat_ptr - pointer to a `SCOTCH_Strat` structure (as `void *`) + * + * Returns: + * nothing (wraps `SCOTCH_stratExit`). + * + ********************************************************************************/ +void scotchm_stratexit(void *strat_ptr) +{ + SCOTCH_stratExit((SCOTCH_Strat *)strat_ptr); +} +#endif \ No newline at end of file