AMDiS 0.3
The Adaptive Multi-Dimensional Simulation Toolbox
MatrixNnzStructure.inc.hpp
#pragma once

#include <algorithm>
#include <map>
#include <set>
#include <vector>

#include <dune/common/timer.hh>
#include <dune/grid/common/partitionset.hh>

#include <amdis/AdaptiveGrid.hpp>
#include <amdis/Output.hpp>
#if HAVE_MPI
#include <amdis/common/parallel/Communicator.hpp>
#include <amdis/common/parallel/RequestOperations.hpp>
#endif
#include <amdis/linearalgebra/AttributeSet.hpp>
namespace AMDiS {

template <class RowBasis, class ColBasis>
void MatrixNnzStructure::init(RowBasis const& rowBasis, ColBasis const& /*colBasis*/)
{
  Dune::Timer t;

  auto const& basis = rowBasis; // TODO: generalize to row != col basis
  auto const& dofMap = basis.communicator().dofMap();
  std::size_t localSize = dofMap.localSize();
  test_exit(localSize > 0, "Local domain has no owned DOFs!");

  std::vector<std::set<PetscInt>> d_nz(localSize), o_nz(localSize);

  // column indices of row DOFs belonging to a remote rank,
  // stored as global-row -> {global-col}
  std::map<PetscInt, std::set<PetscInt>> remote_nz;

  // build up the local nz structure
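  // Owned rows contribute directly to d_nz/o_nz (diagonal vs. off-diagonal block);
  // couplings of rows owned by another rank are collected in remote_nz and sent to that owner below.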
  auto localView = basis.localView();
  for (const auto& element : elements(basis.gridView(), Dune::Partitions::interior)) {
    localView.bind(element);
    std::size_t size = localView.size();

    for (std::size_t i = 0; i < size; ++i) {
      auto row = localView.index(i);
      auto global_row = dofMap.global(row);
      if (dofMap.owner(row)) {
        auto local_row = dofMap.globalToLocal(global_row);
        assert(local_row < localSize);
        for (std::size_t j = 0; j < size; ++j) {
          auto col = localView.index(j);
          if (dofMap.owner(col))
            d_nz[local_row].insert(dofMap.global(col));
          else
            o_nz[local_row].insert(dofMap.global(col));
        }
      } else {
        auto& remote_row = remote_nz[global_row];
        for (std::size_t j = 0; j < size; ++j) {
          auto col = localView.index(j);
          remote_row.insert(dofMap.global(col));
        }
      }
    }

    localView.unbind();
  }

  // insert size of sets into diagonal block nnz and off-diagonal block nnz
  dnnz_.resize(localSize, 0);
  onnz_.resize(localSize, 0);

  std::transform(d_nz.begin(), d_nz.end(), dnnz_.begin(), [](auto const& s) { return s.size(); });
  std::transform(o_nz.begin(), o_nz.end(), onnz_.begin(), [](auto const& s) { return s.size(); });
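  // At this point dnnz_/onnz_ only count couplings seen from this rank's interior elements;
  // contributions to owned rows that were discovered on other ranks are added in the exchange below.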

#if HAVE_MPI
  Mpi::Communicator world(Environment::comm());

  // exchange remote rows
  using Attribute = DefaultAttributeSet::Type;
  using RemoteNz = std::vector<PetscInt>;
  std::vector<RemoteNz> sendList(world.size());
  std::vector<std::size_t> receiveList(world.size(), 0);

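  // For each neighboring rank p, count how many rows we expect to receive (rows owned here
  // that p stores as overlap) and queue triples (global_row, dnnz, onnz) for rows owned by p.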
  for (auto const& rim : basis.communicator().remoteIndices()) {
    int p = rim.first;

    auto* sourceRemoteIndexList = rim.second.first;
    auto* targetRemoteIndexList = rim.second.second;

    // receive from overlap
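    // (DOFs owned here that rank p stores as overlap: p will send us its couplings for these rows)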
    for (auto const& ri : *sourceRemoteIndexList) {
      auto const& lip = ri.localIndexPair();
      Attribute remoteAttr = ri.attribute();
      Attribute myAttr = lip.local().attribute();
      if (myAttr == Attribute::owner && remoteAttr == Attribute::overlap) {
        assert(dofMap.owner(lip.local().local()));
        receiveList[p]++;
      }
    }

    // send to owner
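    // (DOFs stored here as overlap but owned by rank p: classify each coupling as diagonal or
    //  off-diagonal block from p's point of view and queue (row, dnnz, onnz) for sending)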
    for (auto const& ri : *targetRemoteIndexList) {
      auto const& lip = ri.localIndexPair();
      Attribute remoteAttr = ri.attribute();
      Attribute myAttr = lip.local().attribute();
      if (myAttr == Attribute::overlap && remoteAttr == Attribute::owner) {
        assert(!dofMap.owner(lip.local().local()));

        PetscInt global_row = dofMap.global(lip.local().local());
        assert(dofMap.globalOwner(p, global_row));

        PetscInt remoteDnnz = 0;
        PetscInt remoteOnnz = 0;
        for (PetscInt global_col : remote_nz[global_row]) {
          if (dofMap.globalOwner(p, global_col))
            remoteDnnz++;
          else
            remoteOnnz++;
        }

        auto& data = sendList[p];
        data.push_back(global_row);
        data.push_back(remoteDnnz);
        data.push_back(remoteOnnz);
      }
    }
  }

  // 1. send remote nz structure to owner
  std::vector<Mpi::Request> sendRequests;
  for (int p = 0; p < world.size(); ++p) {
    if (!sendList[p].empty()) {
      sendRequests.emplace_back( world.isend(sendList[p], p, tag_) );
    }
  }

  // 2. receive the nz structure belonging to our own DOFs from remote ranks
  std::vector<Mpi::Request> recvRequests;
  std::vector<RemoteNz> recvData(world.size());
  for (int p = 0; p < world.size(); ++p) {
    if (receiveList[p] > 0)
      recvRequests.emplace_back( world.irecv(recvData[p], p, tag_) );
  }

  Mpi::wait_all(recvRequests.begin(), recvRequests.end());

  // 3. insert all remote nz data into my local nnz sets
  for (int p = 0; p < world.size(); ++p) {
    auto const& data = recvData[p];
    assert(data.size() == 3*receiveList[p]);
    for (std::size_t i = 0; i < receiveList[p]; ++i) {
      PetscInt global_row = data[3*i];
      assert(dofMap.globalOwner(global_row));

      PetscInt row_dnnz = data[3*i+1];
      assert(row_dnnz < PetscInt(localSize));

      PetscInt row_onnz = data[3*i+2];
      assert(row_onnz < PetscInt(dofMap.globalSize()));

      auto local_row = dofMap.globalToLocal(global_row);
      assert(local_row < localSize);

      dnnz_[local_row] += row_dnnz;
      onnz_[local_row] += row_onnz;
    }
  }

  // 4. check that the sum of diagonal and off-diagonal entries does not exceed the matrix size;
  //    this may happen for very small matrices
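  // (Over-counting can occur because the received contributions are plain counts, not column sets:
  //  a coupling reported both locally and by a remote rank for the same row is counted twice.)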
  for (std::size_t i = 0; i < localSize; ++i) {
    dnnz_[i] = std::min(dnnz_[i], PetscInt(localSize));
    if (onnz_[i] + dnnz_[i] > PetscInt(dofMap.globalSize())) {
      PetscInt diff = onnz_[i] + dnnz_[i] - dofMap.globalSize();
      onnz_[i] -= diff;
    }
  }

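  // complete the non-blocking sends before the send buffers in sendList go out of scope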
  Mpi::wait_all(sendRequests.begin(), sendRequests.end());

#endif // HAVE_MPI

  info(2, "update MatrixNnzStructure needed {} sec", t.elapsed());
}

} // end namespace AMDiS
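The row-wise counts dnnz_ (diagonal block) and onnz_ (off-diagonal block) computed by this function are intended for matrix preallocation in a PETSc-based backend. The sketch below shows how such counts could be handed to PETSc; it is illustrative only and assumes hypothetical accessors d_nnz() and o_nnz() returning the two count vectors, which are not shown in this file.

#include <petscmat.h>

// Sketch only: preallocate a (parallel) AIJ matrix from per-row nonzero counts.
// The accessors d_nnz()/o_nnz() returning std::vector<PetscInt> are assumptions,
// not part of the interface shown above.
Mat createPreallocatedMatrix(AMDiS::MatrixNnzStructure const& nnz,
                             PetscInt localRows, PetscInt localCols)
{
  Mat mat;
  MatCreate(PETSC_COMM_WORLD, &mat);
  MatSetSizes(mat, localRows, localCols, PETSC_DETERMINE, PETSC_DETERMINE);
  MatSetType(mat, MATAIJ);  // resolves to MATSEQAIJ or MATMPIAIJ depending on the communicator
  // the sequential variant only needs the diagonal-block counts
  MatSeqAIJSetPreallocation(mat, 0, nnz.d_nnz().data());
  // the parallel variant takes per-row counts for the diagonal and the off-diagonal block
  MatMPIAIJSetPreallocation(mat, 0, nnz.d_nnz().data(), 0, nnz.o_nnz().data());
  return mat;
}

Calling both preallocation routines is the usual PETSc idiom: the one that does not match the actual matrix type is a no-op.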
Cross-references:

AMDiS: contains all classes needed for solving linear and nonlinear equation systems. (Definition: AdaptBase.hpp:6)

void info(int noInfoLevel, std::string const& str, Args&&... args): prints a message if Environment::infoLevel() >= noInfoLevel. (Definition: Output.hpp:105)

void test_exit(bool condition, std::string const& str, Args&&... args): tests the condition and, in case of failure, prints the message and exits. (Definition: Output.hpp:163)

static Dune::MPIHelper::MPICommunicator Environment::comm(): returns the MPI_Comm object (or a fake communicator). (Definition: Environment.hpp:86)