#include <dune/common/timer.hh>
#include <dune/grid/common/partitionset.hh>

#include <amdis/AdaptiveGrid.hpp>
#include <amdis/Output.hpp>
#include <amdis/common/parallel/Communicator.hpp>
#include <amdis/common/parallel/RequestOperations.hpp>
#include <amdis/linearalgebra/AttributeSet.hpp>

template <class RowBasis, class ColBasis>
void MatrixNnzStructure::init(RowBasis const& rowBasis, ColBasis const& /*colBasis*/)
{
  Dune::Timer t;
  auto const& basis = rowBasis;
  auto const& dofMap = basis.communicator().dofMap();
  std::size_t localSize = dofMap.localSize();
  test_exit(localSize > 0, "Local domain has no owned DOFs!");
  // Per owned local row: global column indices in the diagonal block
  // (owned columns) and in the off-diagonal block (remote columns)
  std::vector<std::set<PetscInt>> d_nz(localSize), o_nz(localSize);

  // Column indices of rows owned by a remote rank: global row -> {global columns}
  std::map<PetscInt, std::set<PetscInt>> remote_nz;
  // Traverse the interior elements and collect the nonzero pattern row by row
  auto localView = basis.localView();
  for (auto const& element : elements(basis.gridView(), Dune::Partitions::interior)) {
    localView.bind(element);
    std::size_t size = localView.size();

    for (std::size_t i = 0; i < size; ++i) {
      auto row = localView.index(i);
      auto global_row = dofMap.global(row);
      if (dofMap.owner(row)) {
        // the row DOF is owned by this rank
        auto local_row = dofMap.globalToLocal(global_row);
        assert(local_row < localSize);
        for (std::size_t j = 0; j < size; ++j) {
          auto col = localView.index(j);
          if (dofMap.owner(col))
            d_nz[local_row].insert(dofMap.global(col));
          else
            o_nz[local_row].insert(dofMap.global(col));
        }
      } else {
        // the row DOF is owned by a remote rank: remember its columns for the exchange
        auto& remote_row = remote_nz[global_row];
        for (std::size_t j = 0; j < size; ++j) {
          auto col = localView.index(j);
          remote_row.insert(dofMap.global(col));
        }
      }
    }

    localView.unbind();
  }
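  // Only interior elements are traversed above; couplings of rows owned by
  // other ranks are collected in remote_nz and forwarded to their owners in
  // the exchange step below.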
  dnnz_.resize(localSize, 0);
  onnz_.resize(localSize, 0);

  std::transform(d_nz.begin(), d_nz.end(), dnnz_.begin(), [](auto const& s) { return s.size(); });
  std::transform(o_nz.begin(), o_nz.end(), onnz_.begin(), [](auto const& s) { return s.size(); });
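  // Note: PETSc's MPIAIJ format splits each rank's rows into a "diagonal"
  // block (columns owned by this rank) and an "off-diagonal" block (all
  // remaining columns); d_nz/o_nz mirror exactly this split, so the set
  // sizes are the per-row preallocation counts PETSc expects.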
  // Exchange the pattern of remote rows with their owner ranks
  Mpi::Communicator world(Environment::comm());

  using Attribute = DefaultAttributeSet::Type;
  using RemoteNz = std::vector<PetscInt>;
  std::vector<RemoteNz> sendList(world.size());
  std::vector<std::size_t> receiveList(world.size(), 0);
  for (auto const& rim : basis.communicator().remoteIndices()) {
    int p = rim.first; // the neighboring rank

    auto* sourceRemoteIndexList = rim.second.first;
    auto* targetRemoteIndexList = rim.second.second;

    // rows I own that rank p sees as overlap: count the messages to receive
    for (auto const& ri : *sourceRemoteIndexList) {
      auto const& lip = ri.localIndexPair();
      Attribute remoteAttr = ri.attribute();
      Attribute myAttr = lip.local().attribute();
      if (myAttr == Attribute::owner && remoteAttr == Attribute::overlap) {
        assert(dofMap.owner(lip.local().local()));
        receiveList[p]++;
      }
    }
    // rows in my overlap that rank p owns: send their nonzero counts to p
    for (auto const& ri : *targetRemoteIndexList) {
      auto const& lip = ri.localIndexPair();
      Attribute remoteAttr = ri.attribute();
      Attribute myAttr = lip.local().attribute();
      if (myAttr == Attribute::overlap && remoteAttr == Attribute::owner) {
        assert(!dofMap.owner(lip.local().local()));

        PetscInt global_row = dofMap.global(lip.local().local());
        assert(dofMap.globalOwner(p, global_row));

        // split the columns of this row into the owner's diagonal and
        // off-diagonal block
        PetscInt remoteDnnz = 0;
        PetscInt remoteOnnz = 0;
        for (PetscInt global_col : remote_nz[global_row]) {
          if (dofMap.globalOwner(p, global_col))
            remoteDnnz++;
          else
            remoteOnnz++;
        }

        // message layout: flat (global_row, dnnz, onnz) triples
        auto& data = sendList[p];
        data.push_back(global_row);
        data.push_back(remoteDnnz);
        data.push_back(remoteOnnz);
      }
    }
  }
  // 1. send the remote-row counts to the owner ranks
  std::vector<Mpi::Request> sendRequests;
  for (int p = 0; p < world.size(); ++p) {
    if (!sendList[p].empty())
      sendRequests.emplace_back( world.isend(sendList[p], p, tag_) );
  }
  // 2. receive the counts for owned rows from the overlap ranks
  std::vector<Mpi::Request> recvRequests;
  std::vector<RemoteNz> recvData(world.size());
  for (int p = 0; p < world.size(); ++p) {
    if (receiveList[p] > 0)
      recvRequests.emplace_back( world.irecv(recvData[p], p, tag_) );
  }

  Mpi::wait_all(recvRequests.begin(), recvRequests.end());
  // 3. accumulate the received counts into the owned rows
  for (int p = 0; p < world.size(); ++p) {
    auto const& data = recvData[p];
    assert(data.size() == 3*receiveList[p]);
    for (std::size_t i = 0; i < receiveList[p]; ++i) {
      PetscInt global_row = data[3*i];
      assert(dofMap.globalOwner(global_row));

      PetscInt row_dnnz = data[3*i+1];
      assert(row_dnnz < PetscInt(localSize));

      PetscInt row_onnz = data[3*i+2];
      assert(row_onnz < PetscInt(dofMap.globalSize()));

      auto local_row = dofMap.globalToLocal(global_row);
      assert(local_row < localSize);

      dnnz_[local_row] += row_dnnz;
      onnz_[local_row] += row_onnz;
    }
  }
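  // The received counts are added without deduplication: a column coupling
  // reported by several overlap ranks (or already counted locally) is counted
  // more than once, so the sums may exceed the true number of nonzeros. PETSc
  // treats the per-row counts as upper bounds, hence the clamping below.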
  // clamp the counts to valid upper bounds
  for (std::size_t i = 0; i < localSize; ++i) {
    dnnz_[i] = std::min(dnnz_[i], PetscInt(localSize));
    if (onnz_[i] + dnnz_[i] > PetscInt(dofMap.globalSize())) {
      PetscInt diff = onnz_[i] + dnnz_[i] - dofMap.globalSize();
      onnz_[i] -= diff;
    }
  }

  Mpi::wait_all(sendRequests.begin(), sendRequests.end());
  info(2, "update MatrixNnzStructure needed {} sec", t.elapsed());
}
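The per-row counts dnnz_ and onnz_ are exactly what PETSc's preallocation
routines expect. A minimal sketch of how they could be handed over, assuming
a freshly created matrix and direct access to the two vectors (the variable
mat and the access path are illustrative assumptions, not the AMDiS API):

// Sketch only: preallocate a parallel AIJ matrix with the computed pattern.
// The scalar count arguments are ignored when per-row arrays are given.
Mat mat;
MatCreate(PETSC_COMM_WORLD, &mat);
MatSetSizes(mat, localSize, localSize, PETSC_DETERMINE, PETSC_DETERMINE);
MatSetType(mat, MATMPIAIJ);
MatMPIAIJSetPreallocation(mat, 0, dnnz_.data(), 0, onnz_.data());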