#include <stdio.h>
#include <math.h>
#include <mpi.h>

/* Prototypes */
void other_work(int myid);
float integral(float ai, float h, int n);

int main(int argc, char* argv[])
{
/*###############################################################################
#                                                                              #
#  This is an MPI example on parallel integration to demonstrate the use of:  #
#                                                                              #
#  * MPI_Init, MPI_Comm_rank, MPI_Comm_size, MPI_Finalize                      #
#  * MPI_Recv, MPI_Isend, MPI_Wait                                             #
#  * MPI_ANY_SOURCE, MPI_ANY_TAG                                               #
#                                                                              #
#  Dr. Kadin Tseng                                                             #
#  Scientific Computing and Visualization                                      #
#  Boston University                                                           #
#  1998                                                                        #
#                                                                              #
###############################################################################*/
  int n, p, myid, tag, master, proc, ierr;
  float h, integral_sum, a, b, ai, pi, my_int;
  MPI_Comm comm;
  MPI_Request request;
  MPI_Status status;

  comm = MPI_COMM_WORLD;
  ierr = MPI_Init(&argc, &argv);     /* starts MPI */
  MPI_Comm_rank(comm, &myid);        /* get current process id */
  MPI_Comm_size(comm, &p);           /* get number of processes */

  master = 0;
  pi = acos(-1.0);                   /* = 3.14159... */
  a = 0.;                            /* lower limit of integration */
  b = pi*1./2.;                      /* upper limit of integration */
  n = 500;                           /* number of increments within each process */
  tag = 123;                         /* set the tag to identify this particular job */
  h = (b-a)/n/p;                     /* length of each increment */

  ai = a + myid*n*h;                 /* lower limit of integration for partition myid */
  my_int = integral(ai, h, n);       /* 0 <= myid <= p-1 */

  printf("Process %d has the partial result of %f\n", myid, my_int);

  if (myid == master) {              /* master collects the partial sums ... */
    integral_sum = my_int;
    for (proc = 1; proc < p; proc++) {
      MPI_Recv(&my_int, 1, MPI_FLOAT,
               MPI_ANY_SOURCE, MPI_ANY_TAG,
               comm, &status);       /* receive a partial sum from any worker */
      integral_sum += my_int;
    }
    printf("The integral sum = %f\n", integral_sum);
  }
  else {                             /* ... while each worker sends its sum to the master */
    MPI_Isend(&my_int, 1, MPI_FLOAT, master, tag,
              comm, &request);       /* non-blocking send */
    other_work(myid);                /* overlap other work with the send in progress */
    MPI_Wait(&request, &status);     /* wait until the send has completed */
  }

  MPI_Finalize();                    /* let MPI finish up */
  return 0;
}

void other_work(int myid)
{
  /* placeholder for work overlapped with the non-blocking send */
  printf("more work on process %d\n", myid);
}

float integral(float ai, float h, int n)
{
  int j;
  float aij, integ;

  integ = 0.0;                       /* initialize integral */
  for (j = 0; j < n; j++) {          /* sum n midpoint-rule contributions */
    aij = ai + (j + 0.5)*h;          /* abscissa at midpoint of increment j */
    integ += cos(aij)*h;             /* integrand is cos(x) */
  }
  return integ;
}
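/* A possible way to build and run this example, assuming the source file is
   named integral.c and an MPI compiler wrapper such as mpicc is available:

     mpicc -o integral integral.c -lm
     mpirun -np 4 ./integral

   Each process prints its partial result; the master (rank 0) also prints the
   total, which should approach 1.0, the exact value of the integral of cos(x)
   over [0, pi/2]. */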