# homebrew-core/Formula/mpich.rb

class Mpich < Formula
desc "Implementation of the MPI Message Passing Interface standard"
homepage "https://www.mpich.org/"
url "https://www.mpich.org/static/downloads/3.3.1/mpich-3.3.1.tar.gz"
mirror "https://fossies.org/linux/misc/mpich-3.3.1.tar.gz"
sha256 "fe551ef29c8eea8978f679484441ed8bb1d943f6ad25b63c235d4b9243d551e5"
bottle do
cellar :any
sha256 "4483dc34e84b9aea1ed7ffbf84145a62d59dd3b319eefed12ed92fffbb559389" => :mojave
sha256 "aba87dc0cbb581fc52cdb1462ed6a2b32c56cbebbd59a682da884a22f437b7b5" => :high_sierra
sha256 "b64674c00c36b6bd9ba9f177f53f33c6f5f6d5c107f9069a7a5f558cfddb3499" => :sierra
end

  head do
    url "https://github.com/pmodels/mpich.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "gcc" # for gfortran
conflicts_with "open-mpi", :because => "both install MPI compiler wrappers"

  def install
    if build.head?
      # Ensure that the consistent set of Autotools installed by Homebrew is
      # used to build MPICH; otherwise very bizarre build errors can occur.
      ENV["MPICH_AUTOTOOLS_DIR"] = HOMEBREW_PREFIX + "bin"
      system "./autogen.sh"
    end
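
    # --disable-dependency-tracking skips Makefile dependency bookkeeping that a
    # one-shot build does not need; --disable-silent-rules keeps the full
    # compiler command lines in the build log.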
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--mandir=#{man}"
system "make"
system "make", "check"
system "make", "install"
end

  test do
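    # Build and run a minimal MPI hello-world in C with the mpicc wrapper,
    # both standalone and under mpirun with four ranks.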
(testpath/"hello.c").write <<~EOS
#include <mpi.h>
#include <stdio.h>
int main()
{
int size, rank, nameLen;
char name[MPI_MAX_PROCESSOR_NAME];
MPI_Init(NULL, NULL);
MPI_Comm_size(MPI_COMM_WORLD, &size);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
MPI_Get_processor_name(name, &nameLen);
printf("[%d/%d] Hello, world! My name is %s.\\n", rank, size, name);
MPI_Finalize();
return 0;
}
EOS
system "#{bin}/mpicc", "hello.c", "-o", "hello"
system "./hello"
system "#{bin}/mpirun", "-np", "4", "./hello"
(testpath/"hellof.f90").write <<~EOS
program hello
include 'mpif.h'
integer rank, size, ierror, tag, status(MPI_STATUS_SIZE)
call MPI_INIT(ierror)
call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror)
call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror)
print*, 'node', rank, ': Hello Fortran world'
call MPI_FINALIZE(ierror)
end
EOS
system "#{bin}/mpif90", "hellof.f90", "-o", "hellof"
system "./hellof"
system "#{bin}/mpirun", "-np", "4", "./hellof"
end
end