/************************************************************************
 * This file has been written as a sample solution to an exercise in a
 * course given at the Edinburgh Parallel Computing Centre. It is made
 * freely available with the understanding that every copy of this file
 * must include this header and that EPCC takes no responsibility for
 * the use of the enclosed teaching material.
 *
 * Authors: Joel Malard, Alan Simpson
 *
 * Contact: epcc-tec@epcc.ed.ac.uk
 *
 * Purpose: A program that defines a two-dimensional cartesian
 *          topology.
 *
 * Contents: C source code.
 *
 ************************************************************************/

#include <stdio.h>
#include <mpi.h>

#define msg_tag 111
#define COUNT   2
#define DIM     1

int main (int argc, char *argv[])
{
    int my_rank, size;
    int right, left;
    int int_rank, int_other, int_sum, i;
    float float_rank, float_other, float_sum;

    int array_of_blocklengths[COUNT];
    MPI_Aint array_of_displacements[COUNT],
             first_var_address, second_var_address;
    MPI_Datatype array_of_types[COUNT], sendtype, recvtype;

    MPI_Comm new_comm;
    int dims[2], periods[2], reorder;

    MPI_Status send_status;
    MPI_Status recv_status;
    MPI_Request request;

    MPI_Init(&argc, &argv);

    /* Get process info. */
    MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    /* Set up a periodic two-dimensional cartesian topology. */
    dims[0] = 0;
    dims[1] = 0;
    periods[0] = 1;
    periods[1] = 1;
    reorder = 1;

    MPI_Dims_create(size, 2, dims);
    MPI_Cart_create(MPI_COMM_WORLD, 2, dims, periods, reorder, &new_comm);

    /* Get nearest-neighbour ranks along dimension DIM. */
    MPI_Cart_shift(new_comm, DIM, 1, &left, &right);

    /* Set up MPI datatypes for sending and receiving partial sums:
       one int and one float packaged as a single struct type. */
    array_of_blocklengths[0] = 1;
    array_of_blocklengths[1] = 1;

    MPI_Get_address(&int_rank, &first_var_address);
    MPI_Get_address(&float_rank, &second_var_address);

    array_of_displacements[0] = (MPI_Aint) 0;
    array_of_displacements[1] = second_var_address - first_var_address;

    array_of_types[0] = MPI_INT;
    array_of_types[1] = MPI_FLOAT;

    MPI_Type_create_struct(COUNT, array_of_blocklengths,
                           array_of_displacements, array_of_types,
                           &sendtype);
    MPI_Type_commit(&sendtype);

    MPI_Get_address(&int_other, &first_var_address);
    MPI_Get_address(&float_other, &second_var_address);

    array_of_displacements[0] = (MPI_Aint) 0;
    array_of_displacements[1] = second_var_address - first_var_address;

    MPI_Type_create_struct(COUNT, array_of_blocklengths,
                           array_of_displacements, array_of_types,
                           &recvtype);
    MPI_Type_commit(&recvtype);

    /* Compute sums of ranks along dimension DIM by shifting the partial
       values around the periodic ring of processes in that dimension. */
    int_sum = 0;
    float_sum = 0.0;
    int_rank = my_rank;
    float_rank = (float) my_rank;

    for (i = 0; i < dims[DIM]; i++) {
        MPI_Issend(&int_rank, 1, sendtype, right, msg_tag,
                   new_comm, &request);
        MPI_Recv(&int_other, 1, recvtype, left, msg_tag,
                 new_comm, &recv_status);
        MPI_Wait(&request, &send_status);

        int_sum = int_sum + int_other;
        int_rank = int_other;
        float_sum = float_sum + float_other;
        float_rank = float_other;
    }

    printf("PE%d:\tSum = %d\t%f\n", my_rank, int_sum, float_sum);

    MPI_Type_free(&sendtype);
    MPI_Type_free(&recvtype);
    MPI_Finalize();

    return 0;
}
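
/* Usage sketch (assumptions not in the original: an MPI installation that
 * provides the usual mpicc/mpirun wrappers, and the illustrative file name
 * cartesian.c):
 *
 *     mpicc -o cartesian cartesian.c
 *     mpirun -np 4 ./cartesian
 *
 * With 4 processes, MPI_Dims_create normally chooses a 2x2 grid, so each
 * process prints the sum of the ranks of the processes in its periodic
 * ring along dimension DIM (the exact values depend on how ranks are
 * reordered onto the grid). */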