1 #ifndef _READ_MAT_DIST_H_
2 #define _READ_MAT_DIST_H_
10 #include <vector>
15 #ifndef __STDC_CONSTANT_MACROS
16 #define __STDC_CONSTANT_MACROS
18 #ifndef __STDC_LIMIT_MACROS
19 #define __STDC_LIMIT_MACROS
23 #include "CombBLAS/CombBLAS.h"
// NOTE(review): dangling template header — the declaration it introduces
// (original lines ~29-39) falls outside this chunk; left untouched.
28 template <
typename PARMAT>
// Reads a matrix from a Matrix Market file, splits it column-wise into
// `nparts` pieces, and distributes the pieces across the fiber world:
// rank 0 scatters each piece's "essentials" (size/nnz metadata via
// GetEssentials), then point-to-point sends the index/numerical arrays.
// NOTE(review): this chunk is a fragmentary view — the function signature,
// braces, the MPI_Send bodies, and the end-of-phase timer reads are all
// missing, so the comments below describe only what is visible.
40 template <
typename IT,
typename NT>
// Flattened essentials of every column piece, gathered contiguously so a
// single MPI_Scatter can hand each fiber rank its own piece's metadata.
43 std::vector<IT> vecEss;
// One SpDCCols piece per fiber rank after the column split.
44 std::vector< SpDCCols<IT, NT> > partsmat;
49 shared_ptr<CommGrid> layerGrid;
// Parallel Matrix Market read — presumably collective over A's comm grid;
// TODO(review): confirm against CombBLAS ParallelReadMM documentation.
53 A->ParallelReadMM(filename);
// Square-matrix check; the permutation p must match the row dimension.
58 if(
A->getnrow() ==
A->getncol())
60 if(p.TotalLength()!=
A->getnrow())
// (Re)initialize p as the identity permutation 0..nrow-1.
63 p.
iota(
A->getnrow(), 0);
// Phase timers; the matching end-of-phase readings are outside this view.
76 double trans_beg = MPI_Wtime();
81 double split_beg = MPI_Wtime();
// Split the local matrix column-wise into nparts pieces (one per recipient).
82 localmat->
ColSplit(nparts, partsmat);
// Flatten every piece's essentials into one buffer for the scatter below.
84 for(
int i=0; i< nparts; ++i)
86 std::vector<IT> ess = partsmat[i].GetEssentials();
87 for(
auto itr = ess.begin(); itr != ess.end(); ++itr)
89 vecEss.push_back(*itr);
95 double scatter_beg = MPI_Wtime();
// Each fiber rank receives the esscnt essentials of its own piece.
// NOTE(review): assumes every piece yields exactly esscnt essentials; the
// definition of esscnt is outside this chunk — verify.
98 std::vector<IT> myess(esscnt);
99 MPI_Scatter(vecEss.data(), esscnt, MPIType<IT>(), myess.data(), esscnt, MPIType<IT>(), 0, CMG.
fiberWorld);
// Rank 0 keeps piece 0 for itself; pieces 1..nparts-1 go to the other ranks.
103 splitmat = partsmat[0];
104 for(
int recipient=1; recipient< nparts; ++recipient)
107 Arr<IT,NT> arrinfo = partsmat[recipient].GetArrays();
// Send loops over the index and numerical arrays; their bodies (the
// MPI_Send calls, original lines ~109-119) are missing from this view.
108 for(
unsigned int i=0; i< arrinfo.
indarrs.size(); ++i)
113 for(
unsigned int i=0; i< arrinfo.
numarrs.size(); ++i)
// Receiver side: allocate splitmat's storage per the scattered essentials,
// then receive each array. NOTE(review): arrinfo here presumably refers to
// splitmat.GetArrays() obtained on a missing line — verify against the
// full file before modifying.
121 splitmat.Create(myess);
125 for(
unsigned int i=0; i< arrinfo.
indarrs.size(); ++i)
// tag++ keeps tags monotonically increasing so each receive pairs with the
// corresponding send in order.
127 MPI_Recv(arrinfo.
indarrs[i].addr, arrinfo.
indarrs[i].count, MPIType<IT>(), 0, tag++, CMG.
fiberWorld, MPI_STATUS_IGNORE);
129 for(
unsigned int i=0; i< arrinfo.
numarrs.size(); ++i)
131 MPI_Recv(arrinfo.
numarrs[i].addr, arrinfo.
numarrs[i].count, MPIType<NT>(), 0, tag++, CMG.
fiberWorld, MPI_STATUS_IGNORE);