COMBINATORIAL_BLAS 1.6
 
GenRmatDist.h
#ifndef _GEN_RMAT_DIST_H_
#define _GEN_RMAT_DIST_H_

#include <mpi.h>
#include <sys/time.h>
#include <iostream>
#include <iomanip>
#include <functional>
#include <algorithm>
#include <vector>
#include <string>
#include <sstream>

// These macros should be defined before stdint.h is included
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS
#endif
#include <stdint.h>

#include "CombBLAS/CombBLAS.h"
#include "Glue.h"

namespace combblas {
template<typename IT, typename NT>
SpDCCols<IT,NT> * GenRMat(unsigned scale, unsigned EDGEFACTOR, double initiator[4], MPI_Comm & layerworld, bool scramble)
{
    double t01 = MPI_Wtime();
    double t02;

    DistEdgeList<int64_t> * DEL = new DistEdgeList<int64_t>(layerworld);

    std::ostringstream minfo;
    int nprocs = DEL->commGrid->GetSize();
    minfo << "Started Generation of scale " << scale << std::endl;
    minfo << "Using " << nprocs << " MPI processes" << std::endl;
    SpParHelper::Print(minfo.str());

    DEL->GenGraph500Data(initiator, scale, EDGEFACTOR, scramble, false);
    // don't generate packed edges: that function uses MPI_COMM_WORLD, which cannot be used within a single layer!

    SpParHelper::Print("Generated renamed edge lists\n");
    std::ostringstream tinfo;
    t02 = MPI_Wtime();
    tinfo << "Generation took " << t02 - t01 << " seconds" << std::endl;
    SpParHelper::Print(tinfo.str());

    SpParMat<IT, NT, SpDCCols<IT,NT>> * A = new SpParMat<IT, NT, SpDCCols<IT,NT>>(*DEL, false); // conversion from distributed edge list

    delete DEL; // free the edge list memory
    SpParHelper::Print("Created Sparse Matrix\n");

    float balance = A->LoadImbalance();
    std::ostringstream outs;
    outs << "Load balance: " << balance << std::endl;
    SpParHelper::Print(outs.str());

    return A->seqptr();
}
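// Generator() below distributes R-MAT data across the fiber dimension of the 3D
// grid: layer 0 of each fiber generates its local matrix with GenRMat(), column-
// splits it into CMG.GridLayers pieces, and scatters each piece's "essentials"
// (the SpDCCols<IT,NT>::esscount scalars that describe array sizes) so every
// receiver can preallocate via Create() before the raw index and numerical
// arrays arrive over point-to-point sends on fiberWorld.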

template <typename IT, typename NT>
void Generator(unsigned scale, unsigned EDGEFACTOR, double initiator[4], CCGrid & CMG, SpDCCols<IT,NT> & splitmat, bool trans, bool scramble)
{
    std::vector<IT> vecEss; // at layer_grid=0, this will have [CMG.GridLayers * SpDCCols<IT,NT>::esscount] entries
    std::vector< SpDCCols<IT, NT> > partsmat; // only valid at layer_grid=0
    int nparts = CMG.GridLayers;
    if(CMG.layer_grid == 0)
    {
        SpDCCols<IT,NT> * localmat = GenRMat<IT,NT>(scale, EDGEFACTOR, initiator, CMG.layerWorld, scramble);

        double trans_beg = MPI_Wtime();
        if(trans) localmat->Transpose(); // locally transpose
        comp_trans += (MPI_Wtime() - trans_beg);

        double split_beg = MPI_Wtime();
        localmat->ColSplit(nparts, partsmat); // split matrices are emplaced-back into partsmat vector, localmat destroyed

        for(int i = 0; i < nparts; ++i)
        {
            std::vector<IT> ess = partsmat[i].GetEssentials();
            for(auto itr = ess.begin(); itr != ess.end(); ++itr)
            {
                vecEss.push_back(*itr);
            }
        }
        comp_split += (MPI_Wtime() - split_beg);
    }
94
95 double scatter_beg = MPI_Wtime(); // timer on
96 int esscnt = SpDCCols<IT,NT>::esscount; // necessary cast for MPI
97
98 std::vector<IT> myess(esscnt);
99 MPI_Scatter(vecEss.data(), esscnt, MPIType<IT>(), myess.data(), esscnt, MPIType<IT>(), 0, CMG.fiberWorld);
100

    if(CMG.layer_grid == 0) // senders
    {
        splitmat = partsmat[0]; // just copy the local split
        for(int recipient = 1; recipient < nparts; ++recipient) // scatter the others
        {
            int tag = 0;
            Arr<IT,NT> arrinfo = partsmat[recipient].GetArrays();
            for(unsigned int i = 0; i < arrinfo.indarrs.size(); ++i) // send index arrays
            {
                // MPI_Send(const void *buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm)
                MPI_Send(arrinfo.indarrs[i].addr, arrinfo.indarrs[i].count, MPIType<IT>(), recipient, tag++, CMG.fiberWorld);
            }
            for(unsigned int i = 0; i < arrinfo.numarrs.size(); ++i) // send numerical arrays
            {
                MPI_Send(arrinfo.numarrs[i].addr, arrinfo.numarrs[i].count, MPIType<NT>(), recipient, tag++, CMG.fiberWorld);
            }
        }
    }
    else // receivers
    {
        splitmat.Create(myess); // allocate memory for arrays
        Arr<IT,NT> arrinfo = splitmat.GetArrays();

        int tag = 0;
        for(unsigned int i = 0; i < arrinfo.indarrs.size(); ++i) // receive index arrays
        {
            MPI_Recv(arrinfo.indarrs[i].addr, arrinfo.indarrs[i].count, MPIType<IT>(), 0, tag++, CMG.fiberWorld, MPI_STATUS_IGNORE);
        }
        for(unsigned int i = 0; i < arrinfo.numarrs.size(); ++i) // receive numerical arrays
        {
            MPI_Recv(arrinfo.numarrs[i].addr, arrinfo.numarrs[i].count, MPIType<NT>(), 0, tag++, CMG.fiberWorld, MPI_STATUS_IGNORE);
        }
    }
    comm_split += (MPI_Wtime() - scatter_beg);
}

} // namespace combblas

#endif
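
For context, here is a minimal driver sketch showing one way Generator might be invoked. This is not part of GenRmatDist.h: the CCGrid constructor arguments and grid shape are assumptions (see CCGrid.h for the real interface), and the timer-global definitions assume Glue.h only declares them extern. The initiator probabilities and edge factor are the standard Graph500 values.

// Hypothetical driver (not part of this header). Assumes CCGrid(c_factor, gr_cols)
// with nprocs = c_factor * gr_cols * gr_cols, e.g.
//   mpirun -np 8 ./driver   (2 layers of a 2x2 process grid)
#include <mpi.h>
#include <iostream>
#include "CombBLAS/CombBLAS.h"
#include "Glue.h"
#include "GenRmatDist.h"

// timer globals accumulated inside Generator(); assumed declared extern in Glue.h
double comm_split = 0, comp_split = 0, comp_trans = 0;

int main(int argc, char* argv[])
{
    MPI_Init(&argc, &argv);
    {
        double initiator[4] = {0.57, 0.19, 0.19, 0.05}; // standard Graph500 R-MAT parameters

        combblas::CCGrid CMG(2, 2); // assumed construction: 2 layers over a 2x2 grid

        // each process receives its column split of the renumbered R-MAT matrix here
        combblas::SpDCCols<int64_t, double> splitmat;
        combblas::Generator<int64_t, double>(20, 16, initiator, CMG, splitmat, true, true);

        std::cout << "local nonzeros: " << splitmat.getnnz() << std::endl;
    }
    MPI_Finalize();
    return 0;
}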