#ifndef POMEROL_INCLUDE_MPI_DISPATCHER_MPI_SKEL_HPP
#define POMEROL_INCLUDE_MPI_DISPATCHER_MPI_SKEL_HPP
    // Run the wrapped part by calling its compute() method.
    void run() { x.compute(); }
    // Run the wrapped part by calling its prepare() method.
    void run() { x.prepare(); }
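
// Any user type providing the method called by the chosen wrapper can act as
// a part. Minimal illustrative example (MyPart is a hypothetical name, not
// part of this header):
//
//   struct MyPart {
//       void prepare(); // invoked through PrepareWrap<MyPart>::run()
//       void compute(); // invoked through ComputeWrap<MyPart>::run()
//   };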
    // Distribute and run all stored parts over the communicator; returns a
    // map JobId -> WorkerId recording which rank executed each job.
    std::map<pMPI::JobId, pMPI::WorkerId> run(MPI_Comm const& Comm, bool VerboseOutput = true);
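
    // Usage sketch (illustrative only; assumes the compute wrapper is
    // constructible from a part and a complexity estimate, and MyPart,
    // my_parts, estimate_complexity are hypothetical user-side names):
    //
    //   pMPI::mpi_skel<pMPI::ComputeWrap<MyPart>> skel;
    //   for(auto& part : my_parts)
    //       skel.parts.emplace_back(part, estimate_complexity(part));
    //   auto job_map = skel.run(MPI_COMM_WORLD, /*VerboseOutput=*/true);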
template <typename WrapType>
std::map<pMPI::JobId, pMPI::WorkerId> mpi_skel<WrapType>::run(MPI_Comm const& Comm, bool VerboseOutput) {
    int comm_rank = 0;
    int comm_size = 0;
    MPI_Comm_rank(Comm, &comm_rank);
    MPI_Comm_size(Comm, &comm_size);
    int const root = 0;

    if(comm_rank == root) {
        std::cout << "Calculating " << parts.size() << " jobs using " << comm_size << " procs.\n";
    }
    // Only the root rank constructs the dispatcher; on workers it stays empty.
    std::unique_ptr<pMPI::MPIMaster> disp;
    if(comm_rank == root) {
        // Hand out the biggest jobs first: sort job indices in descending
        // order of complexity before seeding the dispatcher.
        std::vector<pMPI::JobId> job_order(parts.size());
        std::iota(job_order.begin(), job_order.end(), 0);

        auto comp1 = [this](std::size_t l, std::size_t r) -> bool {
            return parts[l].complexity > parts[r].complexity;
        };
        std::sort(job_order.begin(), job_order.end(), comp1);
        disp.reset(new pMPI::MPIMaster(Comm, job_order, true));
    }
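
    // Example of the ordering set up above: complexities {2, 7, 4} yield
    // job_order = {1, 2, 0}, so the most expensive part is dispatched first,
    // a greedy longest-job-first order that helps balance load across workers.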
    // Main job-distribution loop: the root hands out jobs while every rank,
    // including the root, works on the parts assigned to it.
    for(pMPI::MPIWorker worker(Comm); !worker.is_finished();) {
        if(comm_rank == root)
            disp->order();
        worker.receive_order();
        if(worker.is_working()) {
            pMPI::JobId p = worker.current_job();
            if(VerboseOutput)
                std::cout << "[" << p + 1 << "/" << parts.size() << "] P" << comm_rank << " : part " << p << " ["
                          << parts[p].complexity << "] run;\n";
            parts[p].run();
            worker.report_job_done();
        }
        if(comm_rank == root)
            disp->check_workers();
    }
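
    // Message flow per job, as the calls above suggest: the master's order()
    // assigns a pending JobId to a free worker; receive_order() picks it up
    // (or a finish signal); once the part has run, report_job_done() notifies
    // the master, and check_workers() marks the sender as free again.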
    if(VerboseOutput && comm_rank == root)
        std::cout << "done.\n";
    std::map<pMPI::JobId, pMPI::WorkerId> job_map;
    if(comm_rank == root) {
        job_map = disp->DispatchMap;
        // Flatten the map into two plain arrays so it can be broadcast with
        // MPI; e.g. a DispatchMap of {0 -> 2, 1 -> 0} is sent as
        // jobs = {0, 1} and workers = {2, 0}.
        long n_jobs = static_cast<long>(job_map.size());
        std::vector<pMPI::JobId> jobs(n_jobs);
        std::vector<pMPI::WorkerId> workers(n_jobs);

        auto it = job_map.cbegin();
        for(long i = 0; i < n_jobs; ++i, ++it) {
            std::tie(jobs[i], workers[i]) = *it;
        }
        MPI_Bcast(&n_jobs, 1, MPI_LONG, root, Comm);
        MPI_Bcast(jobs.data(), static_cast<int>(n_jobs), MPI_INT, root, Comm);
        MPI_Bcast(workers.data(), static_cast<int>(n_jobs), MPI_INT, root, Comm);
    } else {
        // Non-root ranks first learn the number of jobs, then receive the
        // flattened map and rebuild it locally.
        long n_jobs = 0;
        MPI_Bcast(&n_jobs, 1, MPI_LONG, root, Comm);
        std::vector<pMPI::JobId> jobs(n_jobs);
        MPI_Bcast(jobs.data(), static_cast<int>(n_jobs), MPI_INT, root, Comm);
        std::vector<pMPI::WorkerId> workers(n_jobs);
        MPI_Bcast(workers.data(), static_cast<int>(n_jobs), MPI_INT, root, Comm);
        for(long i = 0; i < n_jobs; ++i)
            job_map[jobs[i]] = workers[i];
    }

    return job_map;
}
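
// The returned map tells every rank which worker computed each part; a caller
// can use it, for instance, to broadcast results from their owners. Sketch
// under that assumption (sync_part is a hypothetical user-side function):
//
//   auto job_map = skel.run(Comm);
//   for(auto const& [job, owner] : job_map)
//       sync_part(skel.parts[job], owner, Comm); // owner acts as Bcast root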
167 #endif // #ifndef POMEROL_INCLUDE_MPI_DISPATCHER_MPI_SKEL_HPP