X-Git-Url: http://git.kpe.io/?p=ctsim.git;a=blobdiff_plain;f=src%2Fphm2if.cpp;fp=src%2Fphm2if.cpp;h=0d1a09f5fe37cb5ad81af4b74a9b22740275724d;hp=2afac40c8030132cec197c927aa62f3610d05087;hb=7438260c07e4f3d71c7f43669678c3a912682e4c;hpb=c7b8cd29c1e15bd5a95bff829772da2af1989fe5

diff --git a/src/phm2if.cpp b/src/phm2if.cpp
index 2afac40..0d1a09f 100644
--- a/src/phm2if.cpp
+++ b/src/phm2if.cpp
@@ -2,8 +2,11 @@
 ** This is part of the CTSim program
 ** Copyright (C) 1983-2000 Kevin Rosenberg
 **
-** $Id: phm2if.cpp,v 1.3 2000/06/07 07:43:19 kevin Exp $
+** $Id: phm2if.cpp,v 1.4 2000/06/07 10:12:05 kevin Exp $
 ** $Log: phm2if.cpp,v $
+** Revision 1.4  2000/06/07 10:12:05  kevin
+** Upgraded from MPI to MPI++
+**
 ** Revision 1.3  2000/06/07 07:43:19  kevin
 ** Converted to IF data files and C++
 **
@@ -137,7 +140,7 @@ phm2sdf_usage (const char *program)
   fprintf(stdout," --help Print this help message\n");
 }
 
-#ifdef MPI_CT
+#ifdef HAVE_MPI
 void mpi_gather_image (ImageFile& im_global, ImageFile& im_local, const int opt_debug);
 #endif
 
@@ -161,30 +164,30 @@ phm2sdf_main (const int argc, char *const argv[])
   int opt_trace = TRACE_NONE;
   int opt_verbose = 0;
   double time_start=0, time_end=0;
-#ifdef MPI_CT
+#ifdef HAVE_MPI
   ImageFile* im_local = NULL;
   int mpi_argc = argc;
   char **mpi_argv = (char **) argv;
   double mpi_t1, mpi_t2, mpi_t, mpi_t_g;
 
-  MPI_Init(&mpi_argc, &mpi_argv);
-  MPI_Comm_dup (MPI_COMM_WORLD, &mpi_ct.comm);
-  MPI_Comm_size(mpi_ct.comm, &mpi_ct.nproc);
-  MPI_Comm_rank(mpi_ct.comm, &mpi_ct.my_rank);
+  MPI::Init (mpi_argc, mpi_argv);
+  mpi_ct.comm = MPI::COMM_WORLD.Dup ();
+  mpi_ct.nproc = mpi_ct.comm.Get_size();
+  mpi_ct.my_rank = mpi_ct.comm.Get_rank();
 
-  if (mpi_ct.nproc > MPI_MAX_PROCESS) {
+  if (mpi_ct.nproc > CT_MPI_MAX_PROCESS) {
     sys_error(ERR_FATAL, "Number of mpi processes (%d) exceeds max processes (%d)",
-              mpi_ct.nproc, MPI_MAX_PROCESS);
+              mpi_ct.nproc, CT_MPI_MAX_PROCESS);
   }
 #endif
 
-#ifdef MPI_CT
-  time_start = MPI_Wtime();
+#ifdef HAVE_MPI
+  time_start = MPI::Wtime();
 #else
   time_start = td_current_sec();
 #endif
 
-#ifdef MPI_CT
+#ifdef HAVE_MPI
   if (mpi_ct.my_rank == 0) {
 #endif
 
@@ -208,7 +211,7 @@ phm2sdf_main (const int argc, char *const argv[])
     case O_PHMFILE:
       strncpy(opt_phmfilename, optarg, sizeof(opt_phmfilename));
       phm = phm_create_from_file(opt_phmfilename);
-#ifdef MPI_CT
+#ifdef HAVE_MPI
       if (mpi_ct.my_rank == 0)
        fprintf(stderr, "Can't use phantom from file in MPI mode\n");
       return (1);
@@ -333,28 +336,28 @@ phm2sdf_main (const int argc, char *const argv[])
   if (opt_verbose)
     printf("Rasterize Phantom to Image\n\n");
 
-#ifdef MPI_CT
+#ifdef HAVE_MPI
 }
 #endif
 
-#ifdef MPI_CT
-  mpi_t1 = MPI_Wtime();
-  MPI_Bcast(&opt_verbose, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_debug, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_trace, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_nx, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_ny, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_nsample, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_phmnum, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_filter, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_filter_domain, 1, MPI_INT, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_filter_param, 1, MPI_DOUBLE, 0, mpi_ct.comm);
-  MPI_Bcast(&opt_filter_bw, 1, MPI_DOUBLE, 0, mpi_ct.comm);
+#ifdef HAVE_MPI
+  mpi_t1 = MPI::Wtime();
+  mpi_ct.comm.Bcast (&opt_verbose, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_debug, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_trace, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_nx, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_ny, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_nsample, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_phmnum, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_filter, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_filter_domain, 1, MPI::INT, 0);
+  mpi_ct.comm.Bcast (&opt_filter_param, 1, MPI::DOUBLE, 0);
+  mpi_ct.comm.Bcast (&opt_filter_bw, 1, MPI::DOUBLE, 0);
 
   if (opt_verbose) {
-    mpi_t2 = MPI_Wtime();
+    mpi_t2 = MPI::Wtime();
     mpi_t = mpi_t2 - mpi_t1;
-    MPI_Reduce(&mpi_t, &mpi_t_g, 1, MPI_DOUBLE, MPI_MAX, 0, mpi_ct.comm);
+    mpi_ct.comm.Reduce(&mpi_t, &mpi_t_g, 1, MPI::DOUBLE, MPI::MAX, 0);
     if (mpi_ct.my_rank == 0)
       printf("Time to Bcast vars = %f secs, Max time = %f\n", mpi_t, mpi_t_g);
   }
@@ -375,7 +378,7 @@ phm2sdf_main (const int argc, char *const argv[])
 
   if (opt_phmnum >= 0)
     phm = phm_create (opt_phmnum);
-#ifdef MPI_CT
+#ifdef HAVE_MPI
   else {
     if (mpi_ct.my_rank == 0)
       fprintf(stderr, "phmnum < 0\n");
@@ -386,7 +389,7 @@ phm2sdf_main (const int argc, char *const argv[])
   ImageFileArray v = im_global->getArray ();
   double calctime = 0;
 
-#ifdef MPI_CT
+#ifdef HAVE_MPI
   if (phm->type == P_UNIT_PULSE) {
     if (mpi_ct.my_rank == 0) {
       v[opt_nx/2][opt_ny/2] = 1.;
@@ -399,12 +402,12 @@ phm2sdf_main (const int argc, char *const argv[])
     }
   } else {
     if (opt_verbose)
-      mpi_t1 = MPI_Wtime();
+      mpi_t1 = MPI::Wtime();
     phm_to_imagefile (phm, *im_local, mpi_ct.start_work_unit[mpi_ct.my_rank], mpi_ct.local_work_units[mpi_ct.my_rank], opt_nsample, opt_trace);
     if (opt_verbose) {
-      mpi_t2 = MPI_Wtime();
+      mpi_t2 = MPI::Wtime();
       mpi_t = mpi_t2 - mpi_t1;
-      MPI_Reduce(&mpi_t, &mpi_t_g, 1, MPI_DOUBLE, MPI_MAX, 0, mpi_ct.comm);
+      mpi_ct.comm.Reduce(&mpi_t, &mpi_t_g, 1, MPI::DOUBLE, MPI::MAX, 0);
       if (mpi_ct.my_rank == 0)
        printf("Time to compile phm = %f secs, Max time = %f\n", mpi_t, mpi_t_g);
     }
@@ -426,13 +429,13 @@ phm2sdf_main (const int argc, char *const argv[])
   }
 #endif
 
-#ifdef MPI_CT
-  time_end = MPI_Wtime();
+#ifdef HAVE_MPI
+  time_end = MPI::Wtime();
 #else
   time_end = td_current_sec();
 #endif
 
-#ifdef MPI_CT
+#ifdef HAVE_MPI
   if (mpi_ct.my_rank == 0)
 #endif
   {
@@ -462,7 +465,7 @@ phm2sdf_main (const int argc, char *const argv[])
 
 
 
-#ifdef MPI_CT
+#ifdef HAVE_MPI
 void mpi_gather_image (ImageFile& im_global, ImageFile& im_local, const int opt_debug)
 {
   ImageFileArray vLocal = im_local.getArray();
@@ -472,7 +475,7 @@ void mpi_gather_image (ImageFile& im_global, ImageFile& im_local, const int opt_
   int end_work_unit = mpi_ct.local_work_units[mpi_ct.my_rank] - 1;
 
   for (int iw = 0; iw <= end_work_unit; iw++) {
-    MPI_Send(vLocal[iw], nyLocal, im_local.getMPIDataType(), 0, 0, mpi_ct.comm);
+    mpi_ct.comm.Send(vLocal[iw], nyLocal, im_local.getMPIDataType(), 0, 0);
  }
 
   if (mpi_ct.my_rank == 0) {
@@ -485,8 +488,8 @@ void mpi_gather_image (ImageFile& im_global, ImageFile& im_local, const int opt_
       }
 
       for (int iw = mpi_ct.start_work_unit[iproc]; iw <= end_work_unit; iw++) {
-        MPI_Status status;
-        MPI_Recv(vGlobal[iw], nyGlobal, MPI_FLOAT, iproc, 0, mpi_ct.comm, &status);
+        MPI::Status status;
+        mpi_ct.comm.Recv(vGlobal[iw], nyGlobal, MPI::FLOAT, iproc, 0, status);
       }
     }
   }
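
For reference, here is a minimal standalone sketch (not part of the diff above, and not CTSim code) of the MPI-2 C++ binding style this revision adopts in place of the C API: MPI::Init, Intracomm::Dup/Get_size/Get_rank, Comm::Bcast/Reduce, and MPI::Wtime. The file name, the opt_example variable, and the mpicxx compiler wrapper are illustrative assumptions; the sketch also assumes an MPI implementation that still ships the C++ bindings, which were deprecated in MPI-2.2 and removed in MPI-3.0.

// mpi_cxx_sketch.cpp -- hypothetical example illustrating the MPI-2 C++ bindings
// used by the new code in this diff. Build (where the C++ bindings are available):
//   mpicxx mpi_cxx_sketch.cpp -o mpi_cxx_sketch
#include <mpi.h>
#include <cstdio>

int main (int argc, char* argv[])
{
  MPI::Init (argc, argv);                        // was MPI_Init(&argc, &argv)

  MPI::Intracomm comm = MPI::COMM_WORLD.Dup ();  // was MPI_Comm_dup(MPI_COMM_WORLD, &comm)
  int nproc = comm.Get_size();                   // was MPI_Comm_size(comm, &nproc)
  int rank  = comm.Get_rank();                   // was MPI_Comm_rank(comm, &rank)

  int opt_example = (rank == 0) ? 42 : 0;        // illustrative option set on rank 0
  comm.Bcast (&opt_example, 1, MPI::INT, 0);     // was MPI_Bcast(&opt_example, 1, MPI_INT, 0, comm)

  double t = MPI::Wtime();                       // was MPI_Wtime()
  double t_max = 0.;
  comm.Reduce (&t, &t_max, 1, MPI::DOUBLE, MPI::MAX, 0);  // was MPI_Reduce(..., MPI_DOUBLE, MPI_MAX, 0, comm)

  if (rank == 0)
    printf ("nproc = %d, opt_example = %d, max wtime = %f\n", nproc, opt_example, t_max);

  comm.Free ();                                  // release the duplicated communicator
  MPI::Finalize ();                              // was MPI_Finalize()
  return 0;
}

The design choice mirrored from the diff is that the communicator object carries the operations (comm.Bcast, comm.Reduce, comm.Send, comm.Recv), so the trailing communicator argument of the C API disappears and datatypes and ops become MPI:: constants.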