DNDSR 0.1.0.dev1+gcd065ad
Distributed Numeric Data Structure for CFV
Loading...
Searching...
No Matches
Mesh.cpp
Go to the documentation of this file.
1#include "Mesh.hpp"
3#include "Solver/Direct.hpp"
4
5#include <cstdlib>
6#include <string>
7#include <map>
8#include <set>
9#include <unordered_map>
10#include <unordered_set>
11#include <filesystem>
12#include <fmt/core.h>
13#include "DNDS/EigenPCH.hpp"
16
18
namespace DNDS
{
    // Explicit device-storage instantiations for Geom::ElemInfo, currently disabled.
    // NOTE(review): kept commented out — presumably pending device-backend support; confirm before removing.
    // DNDS_DEVICE_STORAGE_BASE_DELETER_INST(Geom::ElemInfo, )
    // DNDS_DEVICE_STORAGE_INST(Geom::ElemInfo, DeviceBackend::Host, )
}
24
25namespace DNDS::Geom
26{
27
28 /**
29 * \brief Reserved skeleton for parallel topology interpolation.
30 *
31 * This commented-out method is preserved as a placeholder for a future generic
32 * topology management API. Currently, mesh topology (cell2face, face2cell, etc.)
33 * is constructed serially and distributed. However, a parallel interpolation
34 * capability may be needed for:
35 * - Dynamic mesh adaptation (refinement/coarsening)
36 * - Load balancing with topology migration
37 * - Multi-physics coupling with different mesh partitions
38 *
39 * If implementing parallel topology construction, this skeleton provides the
40 * basic structure for building face-based adjacencies from cell2node without
41 * requiring a serial global mesh on rank 0.
42 *
43 * \todo Evaluate need for parallel topology API in Phase 5 refactoring.
44 */
45 // void UnstructuredMeshSerialRW::InterpolateTopology()
46 // {
47 // // count node 2 face
48 // DNDS_MAKE_SSP(cell2faceSerial, mesh->getMPI());
49 // DNDS_MAKE_SSP(face2cellSerial, mesh->getMPI());
50 // DNDS_MAKE_SSP(face2nodeSerial, mesh->getMPI());
51 // DNDS_MAKE_SSP(faceElemInfoSerial, ElemInfo::CommType(), ElemInfo::CommMult(), mesh->getMPI());
52
53 // if (mRank != mesh->getMPI().rank)
54 // return;
55
    // for (DNDS::index iCell = 0; iCell < cell2nodeSerial->Size(); iCell++)
57 // {
58 // // Parallel face construction logic would go here
59 // }
60 // }
61
62 /*******************************************************************************************************************/
63 /*******************************************************************************************************************/
64 /*******************************************************************************************************************/
65
66 // Partition2LocalIdx, Partition2Serial2Global, ConvertAdjSerial2Global,
67 // TransferDataSerial2Global are now in Mesh_PartitionHelpers.hpp
68 // so they can be shared with Mesh_ReadSerializeDistributed.cpp.
69
    //! Inefficient — prefer Partition2Serial2Global; kept only for convenient comparison/validation.
71 void PushInfo2Serial2Global(std::vector<DNDS::index> &serial2Global,
72 DNDS::index localSize,
73 const std::vector<DNDS::index> &pushIndex,
74 const std::vector<DNDS::index> &pushIndexStart,
75 const DNDS::MPIInfo &mpi)
76 {
77 tIndPair Serial2Global;
78 Serial2Global.InitPair("Serial2Global", mpi);
79 Serial2Global.father->Resize(localSize);
80 Serial2Global.TransAttach();
81 Serial2Global.trans.createFatherGlobalMapping();
82 Serial2Global.trans.createGhostMapping(pushIndex, pushIndexStart);
83 Serial2Global.trans.createMPITypes();
84 Serial2Global.son->createGlobalMapping();
85 // Set son to son's global
86 for (DNDS::index iSon = 0; iSon < Serial2Global.son->Size(); iSon++)
87 (*Serial2Global.son)[iSon] = Serial2Global.son->pLGlobalMapping->operator()(mpi.rank, iSon);
88 Serial2Global.trans.pushOnce();
89 serial2Global.resize(localSize);
90 for (DNDS::index iFat = 0; iFat < Serial2Global.father->Size(); iFat++)
91 serial2Global[iFat] = Serial2Global.father->operator[](iFat);
92 }
93
94 // template <class TAdj = tAdj1>
95 // void ConvertAdjSerial2Global(TAdj &arraySerialAdj,
96 // const std::vector<DNDS::index> &partitionJSerial2Global,
97 // const DNDS::MPIInfo &mpi)
98 // {
99 // }
100 /*******************************************************************************************************************/
101 /*******************************************************************************************************************/
102 /*******************************************************************************************************************/
103
106 {
107 if (mesh->getMPI().rank == mRank)
108 DNDS::log() << "UnstructuredMeshSerialRW === Doing PartitionReorderToMeshCell2Cell" << std::endl;
109 DNDS_assert(cnPart == mesh->getMPI().size);
110 // * 1: get the nodal partition
111 nodePartition.resize(coordSerial->Size(), static_cast<DNDS::MPI_int>(INT32_MAX));
112 for (DNDS::index iCell = 0; iCell < cell2nodeSerial->Size(); iCell++)
113 for (DNDS::rowsize ic2n = 0; ic2n < (*cell2nodeSerial).RowSize(iCell); ic2n++)
114 nodePartition[(*cell2nodeSerial)(iCell, ic2n)] = std::min(nodePartition[(*cell2nodeSerial)(iCell, ic2n)], cellPartition.at(iCell));
115 // * 1: get the bnd partition
116 bndPartition.resize(bnd2cellSerial->Size());
117 for (DNDS::index iBnd = 0; iBnd < bnd2cellSerial->Size(); iBnd++)
118 bndPartition[iBnd] = cellPartition[(*bnd2cellSerial)(iBnd, 0)];
119
120 std::vector<DNDS::index> cell_push, cell_pushStart, node_push, node_pushStart, bnd_push, bnd_pushStart;
121 Partition2LocalIdx(cellPartition, cell_push, cell_pushStart, mesh->getMPI());
122 Partition2LocalIdx(nodePartition, node_push, node_pushStart, mesh->getMPI());
123 Partition2LocalIdx(bndPartition, bnd_push, bnd_pushStart, mesh->getMPI());
124 std::vector<DNDS::index> cell_Serial2Global, node_Serial2Global, bnd_Serial2Global;
125 Partition2Serial2Global(cellPartition, cell_Serial2Global, mesh->getMPI(), mesh->getMPI().size);
126 Partition2Serial2Global(nodePartition, node_Serial2Global, mesh->getMPI(), mesh->getMPI().size);
127 // Partition2Serial2Global(bndPartition, bnd_Serial2Global, mesh->getMPI(), mesh->getMPI().size);//seems not needed for now
128 // PushInfo2Serial2Global(cell_Serial2Global, cellPartition.size(), cell_push, cell_pushStart, mesh->getMPI());//*safe validation version
129 // PushInfo2Serial2Global(node_Serial2Global, nodePartition.size(), node_push, node_pushStart, mesh->getMPI());//*safe validation version
130 // PushInfo2Serial2Global(bnd_Serial2Global, bndPartition.size(), bnd_push, bnd_pushStart, mesh->getMPI()); //*safe validation version
131 if (mesh->getMPI().rank == mRank)
132 DNDS::log() << "UnstructuredMeshSerialRW === Doing PartitionReorderToMeshCell2Cell ConvertAdjSerial2Global" << std::endl;
133 ConvertAdjSerial2Global(cell2nodeSerial, node_Serial2Global, mesh->getMPI());
134 // !cell2cell discarded
135 // ConvertAdjSerial2Global(cell2cellSerial, cell_Serial2Global, mesh->getMPI());
136 ConvertAdjSerial2Global(bnd2nodeSerial, node_Serial2Global, mesh->getMPI());
137 ConvertAdjSerial2Global(bnd2cellSerial, cell_Serial2Global, mesh->getMPI());
138
139 mesh->coords.InitPair("coords", mesh->getMPI());
140 mesh->cellElemInfo.InitPair("cellElemInfo", ElemInfo::CommType(), ElemInfo::CommMult(), mesh->getMPI());
141 mesh->bndElemInfo.InitPair("bndElemInfo", ElemInfo::CommType(), ElemInfo::CommMult(), mesh->getMPI());
142 mesh->cell2node.InitPair("cell2node", mesh->getMPI());
143 mesh->cell2cellOrig.InitPair("cell2cellOrig", mesh->getMPI());
144 mesh->node2nodeOrig.InitPair("node2nodeOrig", mesh->getMPI());
145 mesh->bnd2bndOrig.InitPair("bnd2bndOrig", mesh->getMPI());
146 if (mesh->isPeriodic)
147 mesh->cell2nodePbi.InitPair("cell2nodePbi", mesh->getMPI());
148 // !cell2cell discarded
149 mesh->bnd2node.InitPair("bnd2node", mesh->getMPI());
150 mesh->bnd2cell.InitPair("bnd2cell", mesh->getMPI());
151 if (mesh->isPeriodic)
152 mesh->bnd2nodePbi.InitPair("bnd2nodePbi", mesh->getMPI());
153
154 // coord transferring
155 if (mesh->getMPI().rank == mRank)
156 DNDS::log() << "UnstructuredMeshSerialRW === Doing PartitionReorderToMeshCell2Cell Trasfer Data Coord" << std::endl;
157 TransferDataSerial2Global(coordSerial, mesh->coords.father, node_push, node_pushStart, mesh->getMPI());
158 TransferDataSerial2Global(node2nodeOrigSerial, mesh->node2nodeOrig.father, node_push, node_pushStart, mesh->getMPI());
159
160 if (mesh->getMPI().rank == mRank)
161 DNDS::log() << "UnstructuredMeshSerialRW === Doing PartitionReorderToMeshCell2Cell Trasfer Data Cell" << std::endl;
162 // cells transferring
163 // !cell2cell discarded
164 // TransferDataSerial2Global(cell2cellSerial, mesh->cell2cell.father, cell_push, cell_pushStart, mesh->getMPI());
165 TransferDataSerial2Global(cell2nodeSerial, mesh->cell2node.father, cell_push, cell_pushStart, mesh->getMPI());
166 TransferDataSerial2Global(cell2cellOrigSerial, mesh->cell2cellOrig.father, cell_push, cell_pushStart, mesh->getMPI());
167 if (mesh->isPeriodic)
168 TransferDataSerial2Global(cell2nodePbiSerial, mesh->cell2nodePbi.father, cell_push, cell_pushStart, mesh->getMPI());
169 TransferDataSerial2Global(cellElemInfoSerial, mesh->cellElemInfo.father, cell_push, cell_pushStart, mesh->getMPI());
170 if (mesh->getMPI().rank == mRank)
171 DNDS::log() << "UnstructuredMeshSerialRW === Doing PartitionReorderToMeshCell2Cell Trasfer Data Bnd" << std::endl;
172 // bnds transferring
173 TransferDataSerial2Global(bnd2cellSerial, mesh->bnd2cell.father, bnd_push, bnd_pushStart, mesh->getMPI());
174 TransferDataSerial2Global(bnd2nodeSerial, mesh->bnd2node.father, bnd_push, bnd_pushStart, mesh->getMPI());
175 TransferDataSerial2Global(bndElemInfoSerial, mesh->bndElemInfo.father, bnd_push, bnd_pushStart, mesh->getMPI());
176 TransferDataSerial2Global(bnd2bndOrigSerial, mesh->bnd2bndOrig.father, bnd_push, bnd_pushStart, mesh->getMPI());
177 if (mesh->isPeriodic)
178 TransferDataSerial2Global(bnd2nodePbiSerial, mesh->bnd2nodePbi.father, bnd_push, bnd_pushStart, mesh->getMPI());
179
180 {
181 DNDS::MPISerialDo(mesh->getMPI(), [&]()
182 { log() << "[" << mesh->getMPI().rank << ": nCell " << mesh->cell2node.father->Size() << "] " << (((mesh->getMPI().rank + 1) % 10) ? "" : "\n") << std::flush; });
183 MPI::Barrier(mesh->getMPI().comm);
184 if (mesh->getMPI().rank == 0)
185 log() << std::endl;
186 DNDS::MPISerialDo(mesh->getMPI(), [&]()
187 { log() << "[" << mesh->getMPI().rank << ": nNode " << mesh->coords.father->Size() << "] " << (((mesh->getMPI().rank + 1) % 10) ? "" : "\n") << std::flush; });
188 MPI::Barrier(mesh->getMPI().comm);
189 if (mesh->getMPI().rank == 0)
190 log() << std::endl;
191 DNDS::MPISerialDo(mesh->getMPI(), [&]()
192 { log() << "[" << mesh->getMPI().rank << ": nBnd " << mesh->bnd2node.father->Size() << "] " << (((mesh->getMPI().rank + 1) % 10) ? "" : "\n") << std::flush; });
193 MPI::Barrier(mesh->getMPI().comm);
194 if (mesh->getMPI().rank == 0)
195 log() << std::endl;
196 }
197 mesh->adjPrimaryState = Adj_PointToGlobal;
198 if (mesh->getMPI().rank == mRank)
199 DNDS::log() << "UnstructuredMeshSerialRW === Done PartitionReorderToMeshCell2Cell" << std::endl;
200 }
201
204 {
205 DNDS_assert(mesh->adjPrimaryState == Adj_PointToGlobal);
207 dataIsSerialIn = false;
208 dataIsSerialOut = true;
209
210 std::vector<DNDS::index> serialPullCell;
211 std::vector<DNDS::index> serialPullNode;
212 // std::vector<DNDS::index> serialPullBnd;
213
214 DNDS::index numCellGlobal = mesh->cellElemInfo.father->globalSize();
215 // DNDS::index numBndGlobal = mesh->bndElemInfo.father->globalSize();
216 DNDS::index numNodeGlobal = mesh->coords.father->globalSize();
217
218 if (mesh->getMPI().rank == mRank)
219 {
220 serialPullCell.resize(numCellGlobal);
221 serialPullNode.resize(numNodeGlobal);
222 // serialPullBnd.reserve(numBndGlobal);
223 for (DNDS::index i = 0; i < numCellGlobal; i++)
224 serialPullCell[i] = i;
225 for (DNDS::index i = 0; i < numNodeGlobal; i++)
226 serialPullNode[i] = i;
227 // for (DNDS::index i = 0; i < numBndGlobal; i++)
228 // serialPullBnd[i] = i;
229 }
230 cell2nodeSerial = make_ssp<decltype(cell2nodeSerial)::element_type>(ObjName{"cell2nodeSerial"}, mesh->getMPI());
231 if (mesh->isPeriodic)
232 cell2nodePbiSerial = make_ssp<decltype(cell2nodePbiSerial)::element_type>(ObjName{"cell2nodePbiSerial"}, NodePeriodicBits::CommType(), NodePeriodicBits::CommMult(), mesh->getMPI());
233 coordSerial = make_ssp<decltype(coordSerial)::element_type>(ObjName{"coordSerial"}, mesh->getMPI());
234 cellElemInfoSerial = make_ssp<decltype(cellElemInfoSerial)::element_type>(ObjName{"cellElemInfoSerial"}, ElemInfo::CommType(), ElemInfo::CommMult(), mesh->getMPI());
235
236 coordSerialOutTrans.setFatherSon(mesh->coords.father, coordSerial);
237 cell2nodeSerialOutTrans.setFatherSon(mesh->cell2node.father, cell2nodeSerial);
238 if (mesh->isPeriodic)
239 cell2nodePbiSerialOutTrans.setFatherSon(mesh->cell2nodePbi.father, cell2nodePbiSerial);
240 // bnd2nodeSerialOutTrans.setFatherSon(mesh->bnd2node.father, bnd2nodeSerial);
241 cellElemInfoSerialOutTrans.setFatherSon(mesh->cellElemInfo.father, cellElemInfoSerial);
242 // bndElemInfoSerialOutTrans.setFatherSon(mesh->bndElemInfo.father, bndElemInfoSerial);
243
244 // Father could already have global mapping, result should be the same
245 coordSerialOutTrans.createFatherGlobalMapping();
246 cell2nodeSerialOutTrans.createFatherGlobalMapping();
247 if (mesh->isPeriodic)
248 cell2nodePbiSerialOutTrans.createFatherGlobalMapping();
249 // bnd2nodeSerialOutTrans.createFatherGlobalMapping();
250 cellElemInfoSerialOutTrans.createFatherGlobalMapping();
251 // bndElemInfoSerialOutTrans.createFatherGlobalMapping();
252
253 coordSerialOutTrans.createGhostMapping(serialPullNode);
254 cell2nodeSerialOutTrans.createGhostMapping(serialPullCell);
255 if (mesh->isPeriodic)
256 cell2nodePbiSerialOutTrans.createGhostMapping(serialPullCell);
257 // bnd2nodeSerialOutTrans.createGhostMapping(serialPullBnd);
258 cellElemInfoSerialOutTrans.BorrowGGIndexing(cell2nodeSerialOutTrans); // accidentally rewrites mesh->cellElemInfo.father's global mapping but ok
259 // bndElemInfoSerialOutTrans.BorrowGGIndexing(bnd2nodeSerialOutTrans);
260
261 coordSerialOutTrans.createMPITypes();
262 cell2nodeSerialOutTrans.createMPITypes();
263 if (mesh->isPeriodic)
264 cell2nodePbiSerialOutTrans.createMPITypes();
265 // bnd2nodeSerialOutTrans.createMPITypes();
266 cellElemInfoSerialOutTrans.createMPITypes();
267 // bndElemInfoSerialOutTrans.createMPITypes();
268
269 coordSerialOutTrans.pullOnce();
270 cell2nodeSerialOutTrans.pullOnce();
271 if (mesh->isPeriodic)
273 // bnd2nodeSerialOutTrans.pullOnce();
275 // bndElemInfoSerialOutTrans.pullOnce();
276 if (mesh->getMPI().rank == mRank)
277 {
278 DNDS::log() << "UnstructuredMeshSerialRW === BuildSerialOut Done " << std::endl;
279 }
280 }
281
282}
283
284namespace DNDS::Geom
285{
286
288 {
289 using namespace Elem;
290 int hasBad = 0;
291 for (index iCell = 0; iCell < cellElemInfo.Size(); iCell++)
292 {
293 auto eType = cellElemInfo(iCell, 0).getElemType();
294 if (eType == ElemType::Line2 ||
295 eType == ElemType::Tri3 ||
296 eType == ElemType::Quad4 ||
297 eType == ElemType::Tet4 ||
298 eType == ElemType::Hex8 ||
299 eType == ElemType::Prism6 ||
300 eType == ElemType::Pyramid5)
301 {
302 continue;
303 }
304 else
305 {
306 hasBad = 1;
307 break;
308 }
309 }
310 int hasBadAll;
312 return hasBad == 0;
313 }
314
316 {
317 using namespace Elem;
318 int hasBad = 0;
319 for (index iCell = 0; iCell < cellElemInfo.Size(); iCell++)
320 {
321 auto eType = cellElemInfo(iCell, 0).getElemType();
322 if (eType == ElemType::Line3 ||
323 eType == ElemType::Tri6 ||
324 eType == ElemType::Quad9 ||
325 eType == ElemType::Tet10 ||
326 eType == ElemType::Hex27 ||
327 eType == ElemType::Prism18 ||
328 eType == ElemType::Pyramid14)
329 {
330 continue;
331 }
332 else
333 {
334 hasBad = 1;
335 break;
336 }
337 }
338 int hasBadAll;
340 return hasBad == 0;
341 }
342
343 using tIndexMapFunc = std::function<index(index)>;
344
345 static void GeneralCell2NodeToNode2Cell(
346 tCoordPair &coords, tAdjPair &cell2node, tAdjPair &node2cell,
347 const tIndexMapFunc &CellIndexLocal2Global_NoSon,
348 const tIndexMapFunc &NodeIndexLocal2Global_NoSon)
349 {
350 const auto &mpi = coords.father->getMPI();
351 std::unordered_set<index> ghostNodesCompactSet;
352 std::vector<index> ghostNodesCompact;
353 std::unordered_map<index, std::unordered_set<index>> node2CellLocalRecord;
354
355 for (index iCell = 0; iCell < cell2node.father->Size(); iCell++)
356 for (auto iNode : cell2node[iCell])
357 {
358 auto [ret, rank, val] = coords.father->pLGlobalMapping->search(iNode);
359 DNDS_assert_info(ret, "search failed");
360 if (rank != mpi.rank)
361 ghostNodesCompact.push_back(iNode), ghostNodesCompactSet.insert(iNode);
362 node2CellLocalRecord[iNode].insert(CellIndexLocal2Global_NoSon(iCell));
363 }
364
365 // MPI_Barrier(mpi.comm);
366 // std::cout << "here2 " << std::endl;
367
368 tAdj node2cellPast; // + node2cell * a triplet to deal with reverse inserting
369 node2cell.InitPair("node2cell", mpi);
370 node2cellPast = make_ssp<tAdj::element_type>(ObjName{"node2cellPast"}, mpi);
371 //* fill into father
372 node2cell.father->Resize(coords.father->Size());
373 for (index iNode = 0; iNode < coords.father->Size(); iNode++)
374 {
375 index iNodeG = NodeIndexLocal2Global_NoSon(iNode);
376 if (node2CellLocalRecord.count(iNodeG))
377 {
378 node2cell.ResizeRow(iNode, node2CellLocalRecord[iNodeG].size());
379 rowsize in2c = 0;
380 for (auto v : node2CellLocalRecord[iNodeG])
381 node2cell(iNode, in2c++) = v;
382 }
383 }
384 node2cell.TransAttach();
385 node2cell.trans.createFatherGlobalMapping();
386 node2cell.trans.createGhostMapping(ghostNodesCompact);
387 //* fill into son
388 node2cell.son->Resize(node2cell.trans.pLGhostMapping->ghostIndex.size());
389 // std::unordered_set<index> touched; // only used for checking
390 for (auto &[k, s] : node2CellLocalRecord)
391 {
392 MPI_int rank{-1};
393 index val{-1};
394 if (!node2cell.trans.pLGhostMapping->search(k, rank, val))
395 DNDS_assert_info(false, "search failed");
396 if (rank >= 0)
397 {
398 node2cell.son->ResizeRow(val, s.size());
399 rowsize in2c = 0;
400 for (auto v : s)
401 node2cell.son->operator()(val, in2c++) = v;
402 // touched.insert(val);
403 }
404 }
405 // DNDS_assert(touched.size() == node2cell.son->Size());
406
407 // node2cell.trans.pLGhostMapping->pushingIndexGlobal; // where to receive in a push
409 node2cellPastTrans.setFatherSon(node2cell.son, node2cellPast);
410 node2cellPastTrans.createFatherGlobalMapping();
411 std::vector<index> pushSonSeries(node2cell.son->Size());
412 for (index i = 0; i < node2cell.son->Size(); i++)
413 pushSonSeries[i] = i;
414 node2cellPastTrans.createGhostMapping(pushSonSeries, node2cell.trans.pLGhostMapping->ghostStart);
415 node2cellPastTrans.createMPITypes();
416
417 node2cellPastTrans.pullOnce();
418 DNDS_assert(DNDS::size_to_index(node2cell.trans.pLGhostMapping->ghostIndex.size()) == node2cell.son->Size());
419 DNDS_assert(DNDS::size_to_index(node2cell.trans.pLGhostMapping->pushingIndexGlobal.size()) == node2cellPast->Size());
420 // * this state of triplet: node2cell.father - node2cell.son - node2cellPast forms a "unique pushing" for the pair node2cell
421 // * should be made into some standard
422 for (index i = 0; i < node2cellPast->Size(); i++)
423 {
424 index iNodeG = node2cell.trans.pLGhostMapping->pushingIndexGlobal[i]; //?should be right
425 for (auto iCell : (*node2cellPast)[i])
426 node2CellLocalRecord[iNodeG].insert(iCell);
427 }
428 // MPISerialDo(
429 // mpi,
430 // [&]()
431 // {
432 // for (auto &[k, s] : node2CellLocalRecord)
433 // {
434 // if (NodeIndexGlobal2Local_NoSon(k) >= 0 && s.size() != 4)
435 // std::cout << k << ", " << s.size() << "; " << std::flush;
436 // }
437 // std::cout << std::endl;
438 // });
439
440 // reset pair
441 node2cell.InitPair("node2cell", mpi);
442 //* fill into father
443 node2cell.father->Resize(coords.father->Size());
444 for (index iNode = 0; iNode < coords.father->Size(); iNode++)
445 {
446 index iNodeG = NodeIndexLocal2Global_NoSon(iNode);
447 if (node2CellLocalRecord.count(iNodeG))
448 {
449 node2cell.ResizeRow(iNode, node2CellLocalRecord[iNodeG].size());
450 rowsize in2c = 0;
451 for (auto v : node2CellLocalRecord[iNodeG])
452 node2cell(iNode, in2c++) = v;
453 }
454 }
455 }
456
458 const tPoint &translation1,
459 const tPoint &rotationCenter1,
460 const tPoint &eulerAngles1,
461 const tPoint &translation2,
462 const tPoint &rotationCenter2,
463 const tPoint &eulerAngles2,
464 const tPoint &translation3,
465 const tPoint &rotationCenter3,
466 const tPoint &eulerAngles3)
467 {
468 periodicInfo.translation[1].map() = translation1;
469 periodicInfo.translation[2].map() = translation2;
470 periodicInfo.translation[3].map() = translation3;
474 periodicInfo.rotation[1].map() =
478 periodicInfo.rotation[2].map() =
482 periodicInfo.rotation[3].map() =
486 }
487
490 {
495
496 /*****************************************************/
497 // * first recover node2cell
498
499 if (!coords.father->pLGlobalMapping)
500 coords.father->createGlobalMapping(); // for NodeIndexLocal2Global_NoSon
501 if (!cell2node.father->pLGlobalMapping)
502 cell2node.father->createGlobalMapping(); // for CellIndexLocal2Global_NoSon
503
504 GeneralCell2NodeToNode2Cell(
506 [this](index v)
507 { return this->CellIndexLocal2Global_NoSon(v); },
508 [this](index v)
509 { return this->NodeIndexLocal2Global_NoSon(v); });
510
511 if (!bnd2node.father->pLGlobalMapping)
512 bnd2node.father->createGlobalMapping(); // for BndIndexLocal2Global_NoSon
513 GeneralCell2NodeToNode2Cell(
515 [this](index v)
516 { return this->BndIndexLocal2Global_NoSon(v); },
517 [this](index v)
518 { return this->NodeIndexLocal2Global_NoSon(v); });
519
521
522 // if (mpi.rank == 0)
523 // {
524 // for (index i = 0; i < node2cell.father->Size(); i++)
525 // std::cout << node2cell.RowSize(i) - 4 << std::endl;
526 // for (index i = 0; i < node2bnd.father->Size(); i++)
527 // std::cout << node2bnd.RowSize(i) + 10 << std::endl;
528 // }
529
530 // node2cell.TransAttach();
531 // node2cell.trans.createFatherGlobalMapping();
532 // node2cell.trans.createGhostMapping(ghostNodesCompact);
533 // node2cell.trans.createMPITypes();
534 // node2cell.trans.pullOnce();
535
536 // mesh->node2cell.TransAttach();
537 // mesh->node2cell.trans.BorrowGGIndexing(mesh->coords);
538 // mesh->node2cell.trans.createMPITypes();
539 // mesh->node2cell.trans.pullOnce();
540 }
541
543 {
550
552 coords.trans.createFatherGlobalMapping(); // for NodeIndexLocal2Global_NoSon
554 cell2node.trans.createFatherGlobalMapping(); // for CellIndexLocal2Global_NoSon
556 bnd2node.trans.createFatherGlobalMapping(); // for BndIndexLocal2Global_NoSon
557
558 // std::cout << "RecoverCell2CellAndBnd2Cell here1" << std::endl;
559
560 std::unordered_set<index> ghostNodesCompactSet;
561 for (index i = 0; i < cell2node.father->Size(); i++)
562 for (auto in : cell2node.father->operator[](i))
564 ghostNodesCompactSet.insert(in);
565 for (index i = 0; i < bnd2node.father->Size(); i++)
566 for (auto in : bnd2node.father->operator[](i))
568 ghostNodesCompactSet.insert(in);
569 std::vector<index> ghostNodes;
570 ghostNodes.reserve(ghostNodesCompactSet.size());
571 for (auto v : ghostNodesCompactSet)
572 ghostNodes.push_back(v);
573 // std::cout << "RecoverCell2CellAndBnd2Cell here2" << std::endl;
574 node2cell.son = make_ssp<decltype(node2cell.son)::element_type>(ObjName{"node2cell.son"}, mpi);
576 node2cell.trans.createFatherGlobalMapping();
577 node2cell.trans.createGhostMapping(ghostNodes);
578 node2cell.trans.createMPITypes(); //! warning, this is not actual final official trans, just needed temporarily
579 node2cell.trans.pullOnce();
580
581 std::unordered_map<index, index> iNodeGlobal2LocalAppendInNode2Cell;
582
583 for (index i = 0; i < node2cell.Size(); i++)
584 iNodeGlobal2LocalAppendInNode2Cell[node2cell.trans.pLGhostMapping->operator()(-1, i)] = i;
585
586 for (index i = 0; i < cell2node.father->Size(); i++)
587 for (auto in : cell2node.father->operator[](i))
589
590 cell2cell.InitPair("cell2cell", mpi); // actual outputs need empty but constructed son
591 cell2cell.father->Resize(cell2node.father->Size());
592 for (index i = 0; i < cell2node.father->Size(); i++)
593 {
594 std::set<index> cellRec;
595 for (auto in : cell2node.father->operator[](i))
596 {
599 cellRec.insert(ico);
600 }
601 auto ret = cellRec.erase(CellIndexLocal2Global_NoSon(i));
602 DNDS_assert(ret == 1);
603 cell2cell.father->ResizeRow(i, cellRec.size());
604 rowsize ic2c = 0;
605 for (auto v : cellRec)
606 cell2cell.father->operator()(i, ic2c++) = v;
607 }
608 // std::cout << "RecoverCell2CellAndBnd2Cell here2.5" << std::endl;
609 bnd2cell.InitPair("bnd2cell", mpi); // actual outputs need empty but constructed son
610 bnd2cell.father->Resize(bnd2node.father->Size());
611
612 // For periodic meshes, store per-bnd candidate cell sets from the node
613 // intersection pass, then resolve via pbi check in a second pass after
614 // ghost-pulling cell2node/cell2nodePbi for the candidate cells.
615 std::vector<std::set<index>> bndCellCandidates;
616 if (isPeriodic)
617 bndCellCandidates.resize(bnd2node.father->Size());
618
619 for (index i = 0; i < bnd2node.father->Size(); i++)
620 {
621 std::set<index> cellRecCur;
622 bool initDone{false};
623 // std::cout << "RecoverCell2CellAndBnd2Cell here L1 1" << std::endl;
624 for (auto in : bnd2node.father->operator[](i))
625 {
627 std::set<index> cellRecCurNew;
629 if (!initDone || cellRecCur.count(ico))
630 cellRecCurNew.insert(ico);
631 std::swap(cellRecCur, cellRecCurNew);
632 initDone = true;
633 }
634
635 // Periodic pbi check and bnd2cell assignment are deferred to the
636 // second pass below (after ghost-pulling cell2node/cell2nodePbi for
637 // the candidate cells). Store candidates per bnd for now.
638 if (isPeriodic)
639 bndCellCandidates[i] = std::move(cellRecCur);
640 else
641 {
642 DNDS_assert_info(cellRecCur.size() == 1, fmt::format("cellRecCur.size() is [{}]", cellRecCur.size()));
643 bnd2cell.father->operator()(i, 0) = *cellRecCur.begin();
644 bnd2cell.father->operator()(i, 1) = UnInitIndex;
645 }
646 }
647
648 /************************************************************/
649 // Periodic: ghost-pull cell2node and cell2nodePbi for all candidate
650 // cells, then do the pbi filter and donor/receiver assignment.
651 /************************************************************/
652 if (isPeriodic)
653 {
654 // Collect all unique candidate cells across all periodic bnds
655 std::vector<index> neededCells;
656 for (index i = 0; i < bnd2cell.father->Size(); i++)
657 {
658 if (!Geom::FaceIDIsPeriodic(bndElemInfo.father->operator()(i, 0).zone))
659 continue;
660 for (auto ic : bndCellCandidates[i])
661 neededCells.push_back(ic);
662 }
663
666 cell2nodePbi.trans.createFatherGlobalMapping();
667 cell2nodePbi.trans.createGhostMapping(neededCells); //! warning, this is not actual final official trans, just needed temporarily
668 cell2nodePbi.trans.createMPITypes();
669 cell2nodePbi.trans.pullOnce();
670 cell2node.son = make_ssp<decltype(cell2node.son)::element_type>(ObjName{"cell2node.son"}, mpi);
671 cell2node.BorrowAndPull(cell2nodePbi); //! warning, this is not actual final official trans, just needed temporarily
672
673 // Now do the periodic pbi filter for each bnd
674 for (index i = 0; i < bnd2cell.father->Size(); i++)
675 {
676 if (!Geom::FaceIDIsPeriodic(bndElemInfo.father->operator()(i, 0).zone))
677 continue;
678
679 auto &cellRecCur = bndCellCandidates[i]; // the pre-pbi candidates (from node intersection)
680 std::set<index> cellRecFiltered;
681 for (auto ic : cellRecCur)
682 {
683 auto [ret, rank, icAppend] = cell2nodePbi.trans.pLGhostMapping->search_indexAppend(ic);
684 DNDS_assert_info(ret, fmt::format("periodic bnd {} candidate cell {} not found in ghost mapping", i, ic));
685
686 bool cellContainsBnd = true;
687 for (int ib2n = 0; ib2n < bnd2node.father->operator[](i).size(); ib2n++)
688 {
689 index iNode = bnd2node.father->operator[](i)[ib2n];
690 auto iNodePbi = bnd2nodePbi.father->operator[](i)[ib2n];
692 for (int ic2n = 0; ic2n < cell2node[icAppend].size(); ic2n++)
693 if (iNode == cell2node(icAppend, ic2n))
694 {
698 }
700 if (nIndexPBIMatchNode == 0)
701 cellContainsBnd = false;
702 }
703 if (cellContainsBnd)
704 cellRecFiltered.insert(ic);
705 }
706
707 // Periodic bnd: 2 cells or 1 (self-periodic)
708 DNDS_assert_info(cellRecFiltered.size() == 2 || cellRecFiltered.size() == 1,
709 fmt::format("periodic bnd {} has {} cells after pbi filter", i, cellRecFiltered.size()));
710 auto it = cellRecFiltered.begin();
711 bnd2cell.father->operator()(i, 0) = *it;
712 if (cellRecFiltered.size() == 2)
713 ++it;
714 bnd2cell.father->operator()(i, 1) = *it;
715 }
716
717 // Swap check: ensure bnd2cell(i, 0) is the donor-side cell
718 for (index i = 0; i < bnd2cell.father->Size(); i++)
719 if (bnd2cell(i, 1) != UnInitIndex && bnd2cell(i, 0) != bnd2cell(i, 1) /* no need to check if both sides are same*/)
720 {
721 index ic0 = bnd2cell(i, 0);
722 index ic1 = bnd2cell(i, 1);
723 auto [ret0, rank0, ic0L] = cell2nodePbi.trans.pLGhostMapping->search_indexAppend(ic0);
724 auto [ret1, rank1, ic1L] = cell2nodePbi.trans.pLGhostMapping->search_indexAppend(ic1);
725 // std::cout << fmt::format("ic0L ic1L, {},{}", ic0L, ic1L) << std::endl;
726 DNDS_assert_info(ret0 && ret1, "search failed");
727 std::vector<Geom::NodePeriodicBits> pbi0, pbi1;
728
729 for (auto in : bnd2node[i])
730 {
731 bool found0{false}, found1{false};
732 for (rowsize ic2n = 0; ic2n < cell2node[ic0L].size(); ic2n++)
733 if (cell2node[ic0L][ic2n] == in)
734 found0 = true, pbi0.push_back(cell2nodePbi(ic0L, ic2n));
735 for (rowsize ic2n = 0; ic2n < cell2node[ic1L].size(); ic2n++)
736 if (cell2node[ic1L][ic2n] == in)
737 found1 = true, pbi1.push_back(cell2nodePbi(ic1L, ic2n));
739 }
741 {
742 if (Geom::FaceIDIsPeriodic1(bndElemInfo(i, 0).zone))
743 b = b & nodePB1;
744 else if (Geom::FaceIDIsPeriodic2(bndElemInfo(i, 0).zone))
745 b = b & nodePB2;
746 else if (Geom::FaceIDIsPeriodic3(bndElemInfo(i, 0).zone))
747 b = b & nodePB3;
748 };
749 for (auto &b : pbi0)
751 for (auto &b : pbi1)
753
755 if (bndElemInfo(i, 0).zone == Geom::BC_ID_PERIODIC_1_DONOR)
757 else if (bndElemInfo(i, 0).zone == Geom::BC_ID_PERIODIC_2_DONOR)
758 bndBit.setP2True();
759 else if (bndElemInfo(i, 0).zone == Geom::BC_ID_PERIODIC_3_DONOR)
760 bndBit.setP3True();
761 else if (!Geom::FaceIDIsPeriodic(bndElemInfo(i, 0).zone))
762 DNDS_assert_info(false, "this bnd with both sides has to be periodic");
763
764 bool match0{true}, match1{true};
765 for (auto b : pbi0)
766 if (b ^ bndBit)
767 match0 = false;
768 for (auto b : pbi1)
769 if (b ^ bndBit)
770 match1 = false;
771 if (match0 && !match1)
772 ; // keep
773 else if (match1 && !match0)
774 std::swap(bnd2cell(i, 0), bnd2cell(i, 1));
775 else
776 {
777 if (mpi.rank >= 0)
778 {
779 for (auto b : pbi0)
780 DNDS::log() << b << ", ";
781 DNDS::log() << " ---- ";
782 for (auto b : pbi1)
783 DNDS::log() << b << ", ";
784 DNDS::log() << " ---- " << bndBit;
785 DNDS::log() << std::endl;
786 }
787 DNDS_assert_info(false,
788 match0
789 ? "this periodic bnd matches both sides"
790 : "this periodic bnd matches no sides");
791 }
792 // if (mpi.rank == 0)
793 // {
794 // if (match1 && !match0)
795 // std::cout << "swap " << i << std::endl;
796 // }
797 }
798 }
799 }
800
803 {
805 DNDS_assert(cell2cell.father && cell2cell.father->Size() == this->NumCell());
806 DNDS_assert(bnd2cell.father && bnd2cell.father->Size() == this->NumBnd());
807 /********************************/
808 // cells
809 {
813 if (isPeriodic)
816
817 cell2cell.trans.createFatherGlobalMapping();
818
819 std::vector<DNDS::index> ghostCells;
820 for (DNDS::index iCell = 0; iCell < cell2cell.father->Size(); iCell++)
821 {
822 for (DNDS::rowsize ic2c = 0; ic2c < cell2cell.father->RowSize(iCell); ic2c++)
823 {
825 DNDS::MPI_int rank;
826 DNDS::index val;
827 if (!cell2cell.trans.pLGlobalMapping->search(iCellOther, rank, val))
828 DNDS_assert_info(false, "search failed");
829 if (rank != mpi.rank)
830 ghostCells.push_back(iCellOther);
831 }
832 }
833 cell2cell.trans.createGhostMapping(ghostCells);
834
835 cell2cell.trans.createMPITypes();
836 cell2cell.trans.pullOnce();
839 if (isPeriodic)
842 }
843 // if(mpi.rank == 0)
844 // std::cout <<"XXXXXXXXXXXXXXXXXXXXXXXXX" <<std::endl;
845 // if(mpi.rank == 0)
846 // for(index iC = 0; iC < coords.father->Size(); iC ++)
847 // std::cout << coords.father->operator[](iC).transpose() << std::endl;
848
849 /********************************/
850 // cells done, go on to nodes
851 {
854
855 coords.trans.createFatherGlobalMapping();
856
857 std::vector<DNDS::index> ghostNodes;
858 for (DNDS::index iCell = 0; iCell < cell2cell.Size(); iCell++) // note doing full (son + father) traverse
859 {
861 {
862 auto iNode = cell2node(iCell, ic2c);
863 DNDS::MPI_int rank;
864 DNDS::index val;
865 if (!coords.trans.pLGlobalMapping->search(iNode, rank, val))
866 DNDS_assert_info(false, "search failed");
867 if (rank != mpi.rank)
868 ghostNodes.push_back(iNode);
869 }
870 }
871 coords.trans.createGhostMapping(ghostNodes);
872 coords.trans.createMPITypes();
873 coords.trans.pullOnce();
875 }
876 // if(mpi.rank == 0)
877 // std::cout <<"XXXXXXXXXXXXXXXXXXXXXXXXX" <<std::endl;
878 // if(mpi.rank == 0)
879 // for(index iC = 0; iC < coords.Size(); iC ++)
880 // std::cout << coords.operator[](iC).transpose() << std::endl;
881
882 /********************************/
883 // bnds: added via node2bnd's father part
884 {
889 if (isPeriodic)
893
894 bnd2cell.trans.createFatherGlobalMapping();
895
896 std::vector<DNDS::index> ghostBnds;
897 for (index iNode = 0; iNode < node2bnd.father->Size(); iNode++)
898 for (auto iBnd : node2bnd[iNode])
899 {
900 auto [ret, rank, val] = bnd2cell.trans.pLGlobalMapping->search(iBnd);
901 DNDS_assert_info(ret, "search failed");
902 if (rank != mpi.rank)
903 ghostBnds.push_back(iBnd);
904 }
905 bnd2cell.trans.createGhostMapping(ghostBnds);
906 bnd2cell.trans.createMPITypes();
907 bnd2cell.trans.pullOnce();
909 if (isPeriodic)
913
914 // Ghost bnds may reference nodes not yet in the coord ghost layer.
915 // Add those nodes so that AdjGlobal2LocalPrimary can convert bnd2node.
916 // This is collective: all ranks must participate even if some have no extras.
917 {
918 std::vector<DNDS::index> extraGhostNodes;
919 for (DNDS::index iBnd = bnd2node.father->Size(); iBnd < bnd2node.Size(); iBnd++)
920 for (DNDS::rowsize j = 0; j < bnd2node.RowSize(iBnd); j++)
921 {
922 auto iNode = bnd2node(iBnd, j);
923 DNDS::MPI_int rank;
924 DNDS::index val;
925 if (!coords.trans.pLGhostMapping->search_indexAppend(iNode, rank, val))
926 extraGhostNodes.push_back(iNode);
927 }
931 if (nExtraGlobal > 0)
932 {
933 // Rebuild coord ghost mapping with the additional nodes
934 auto &existingGhost = coords.trans.pLGhostMapping->ghostIndex;
935 std::vector<DNDS::index> allGhostNodes(existingGhost.begin(), existingGhost.end());
936 allGhostNodes.insert(allGhostNodes.end(), extraGhostNodes.begin(), extraGhostNodes.end());
937 coords.trans.createGhostMapping(allGhostNodes);
938 node2nodeOrig.trans.BorrowGGIndexing(coords.trans);
939 coords.trans.createMPITypes();
940 node2nodeOrig.trans.createMPITypes();
941 coords.trans.pullOnce();
942 node2nodeOrig.trans.pullOnce();
943 }
944 }
945 }
946 }
947
950 {
951 // needs results of BuildGhostPrimary()
953 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
954 DNDS_assert_info(coords.trans.pLGhostMapping, "trans of coords needed but not built");
955
956 /**********************************/
957 // convert bnd2cell, bnd2node, cell2cell, cell2node ptrs global to local
959 [&](index v) { return CellIndexGlobal2Local(v); });
960
961 for (DNDS::index iBnd = 0; iBnd < bnd2cell.Size(); iBnd++)
962 for (DNDS::rowsize j = 0; j < bnd2cell.RowSize(iBnd); j++)
964 DNDS_assert(j == 0 ? (iBnd < bnd2cell.father->Size() ? bnd2cell(iBnd, j) >= 0 : true) : true); // father bnds' owner cell must be inside
965
967 [&](index v) { auto r = NodeIndexGlobal2Local(v); DNDS_assert(r >= 0); return r; });
968
970 [&](index v) { auto r = NodeIndexGlobal2Local(v); DNDS_assert(r >= 0); return r; });
971 /**********************************/
973 }
974
977 {
979 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
980 DNDS_assert_info(coords.trans.pLGhostMapping, "trans of coords needed but not built");
981
982 /**********************************/
983 // convert bnd2cell, bnd2node, cell2cell, cell2node ptrs local to global
985 [&](index v) { return CellIndexLocal2Global(v); });
986
987 for (DNDS::index iBnd = 0; iBnd < bnd2cell.Size(); iBnd++)
988 for (DNDS::rowsize j = 0; j < bnd2cell.RowSize(iBnd); j++)
990 DNDS_assert(j == 0 ? (iBnd < bnd2cell.father->Size() ? bnd2cell(iBnd, j) >= 0 : true) : true); // father bnds' owner cell must be inside
991
993 [&](index v) { auto r = NodeIndexLocal2Global(v); DNDS_assert(r >= 0); return r; });
994
996 [&](index v) { auto r = NodeIndexLocal2Global(v); DNDS_assert(r >= 0); return r; });
997 /**********************************/
999 }
1000
1002 AdjGlobal2LocalPrimaryForBnd() // a reduction of primary version
1003 {
1004 // needs results of BuildGhostPrimary()
1006 /**********************************/
1007 // convert cell2node ptrs global to local
1009 [&](index v) { auto r = NodeIndexGlobal2Local(v); DNDS_assert(r >= 0); return r; });
1010 /**********************************/
1012 }
1013
1015 AdjLocal2GlobalPrimaryForBnd() // a reduction of primary version
1016 {
1018 /**********************************/
1019 // convert cell2node ptrs local to global
1021 [&](index v) { auto r = NodeIndexLocal2Global(v); DNDS_assert(r >= 0); return r; });
1022 /**********************************/
1024 }
1025
1028 {
1030 /**********************************/
1031 // convert face2cell ptrs and face2node ptrs global to local
1032#ifdef DNDS_USE_OMP
1033# pragma omp parallel for
1034#endif
1035 for (DNDS::index iFace = 0; iFace < face2cell.Size(); iFace++)
1036 {
1037 for (rowsize if2n = 0; if2n < face2node.RowSize(iFace); if2n++)
1038 {
1040 index val;
1041 MPI_int rank;
1042 auto ret = coords.trans.pLGhostMapping->search_indexAppend(iNode, rank, val);
1044 iNode = val;
1045 }
1046 for (rowsize if2c = 0; if2c < 2; if2c++)
1047 {
1049 if (iCell != UnInitIndex) // is not a bnd
1050 {
1051 index val;
1052 MPI_int rank;
1053 auto ret = cell2node.trans.pLGhostMapping->search_indexAppend(iCell, rank, val);
1055 iCell = val;
1056 }
1057 }
1058 }
1059 /**********************************/
1061 }
1062
1065 {
1067 /**********************************/
1068 // convert face2cell ptrs and face2node ptrs to global
1069#ifdef DNDS_USE_OMP
1070# pragma omp parallel for
1071#endif
1072 for (DNDS::index iFace = 0; iFace < face2cell.Size(); iFace++)
1073 {
1074 for (rowsize if2n = 0; if2n < face2node.RowSize(iFace); if2n++)
1075 {
1077 iNode = coords.trans.pLGhostMapping->operator()(-1, iNode);
1078 }
1079 for (rowsize if2c = 0; if2c < 2; if2c++)
1080 {
1082 if (iCell != UnInitIndex) // is not a bnd
1083 iCell = cell2node.trans.pLGhostMapping->operator()(-1, iCell);
1084 }
1086 iBnd = this->BndIndexGlobal2Local(iBnd);
1087 }
1088 // MPI::Barrier(mpi.comm);
1089 /**********************************/
1091 }
1092
1095 {
1097 /**********************************/
1098 // convert cell2face
1099
1101 {
1102 DNDS_assert(face2node.trans.pLGhostMapping);
1103 if (iF == UnInitIndex)
1104 return;
1105 if (iF < 0) // mapping to un-found in father-son
1106 iF = -1 - iF;
1107 else
1108 iF = face2node.trans.pLGhostMapping->operator()(-1, iF);
1109 };
1110
1111#ifdef DNDS_USE_OMP
1112# pragma omp parallel for
1113#endif
1114 for (DNDS::index iCell = 0; iCell < cell2face.Size(); iCell++)
1115 {
1116 for (rowsize ic2f = 0; ic2f < cell2face.RowSize(iCell); ic2f++)
1117 {
1120 }
1121 }
1122 for (index iBnd = 0; iBnd < bnd2face.Size(); iBnd++)
1123 for (auto &iFace : bnd2face[iBnd])
1125 // MPI::Barrier(mpi.comm);
1126 /**********************************/
1128 }
1129
1132 {
1134 /**********************************/
1135 // convert cell2face
1136
1138 {
1139 DNDS_assert(face2node.trans.pLGhostMapping);
1140 if (iF == UnInitIndex)
1141 return;
1142 DNDS::MPI_int rank;
1143 DNDS::index val;
1144 auto result = face2node.trans.pLGhostMapping->search_indexAppend(iF, rank, val);
1145 if (result)
1146 iF = val;
1147 else
1148 iF = -1 - iF; // mapping to un-found in father-son
1149 };
1150
1151#ifdef DNDS_USE_OMP
1152# pragma omp parallel for
1153#endif
1154 for (DNDS::index iCell = 0; iCell < cell2face.Size(); iCell++)
1155 {
1156 for (rowsize ic2f = 0; ic2f < cell2face.RowSize(iCell); ic2f++)
1157 {
1160 }
1161 }
1162 for (index iBnd = 0; iBnd < bnd2face.Size(); iBnd++)
1163 for (auto &iFace : bnd2face[iBnd])
1165 /**********************************/
1167 }
1168
1171 {
1173
1174 DNDS_assert(coords.trans.father && coords.trans.pLGhostMapping);
1175
1177 node2cell.trans.BorrowGGIndexing(coords.trans);
1178 node2cell.trans.createMPITypes();
1179 node2cell.trans.pullOnce();
1180
1182 node2bnd.trans.BorrowGGIndexing(coords.trans);
1183 node2bnd.trans.createMPITypes();
1184 node2bnd.trans.pullOnce();
1185 }
1186
1189 {
1191 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
1192 DNDS_assert_info(bndElemInfo.trans.pLGhostMapping, "trans of bndElemInfo needed but not built");
1193 /**********************************/
1194#ifdef DNDS_USE_OMP
1195# pragma omp parallel for
1196#endif
1197 for (index iNode = 0; iNode < node2cell.Size(); iNode++)
1198 for (index &iCell : node2cell[iNode])
1200
1201#ifdef DNDS_USE_OMP
1202# pragma omp parallel for
1203#endif
1204 for (index iNode = 0; iNode < node2bnd.Size(); iNode++)
1205 for (index &iBnd : node2bnd[iNode])
1206 iBnd = BndIndexLocal2Global(iBnd); // ! bnd has son now
1207
1208 // MPI::Barrier(mpi.comm);
1209 /**********************************/
1211 }
1212
1215 {
1217 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
1218 DNDS_assert_info(bndElemInfo.trans.pLGhostMapping, "trans of bndElemInfo needed but not built");
1219 /**********************************/
1220// todo: ensure using dist omp settings not single!
1221#ifdef DNDS_USE_OMP
1222# pragma omp parallel for
1223#endif
1224 for (index iNode = 0; iNode < node2cell.Size(); iNode++)
1225 for (index &iCell : node2cell[iNode])
1227
1228#ifdef DNDS_USE_OMP
1229# pragma omp parallel for
1230#endif
1231 for (index iNode = 0; iNode < node2bnd.Size(); iNode++)
1232 for (index &iBnd : node2bnd[iNode])
1233 iBnd = BndIndexGlobal2Local(iBnd); // ! bnd has son now
1234
1235 // MPI::Barrier(mpi.comm);
1236 /**********************************/
1238 }
1239
1241 {
1242 for (index iNode = NumNode(); iNode < NumNodeProc(); iNode++)
1243 {
1244 int nCellAdjIn = 0;
1245 for (auto iCell : node2cell[iNode])
1246 if (iCell >= 0)
1247 {
1249 nCellAdjIn++;
1250 for (auto iNodeOther : cell2node[iCell])
1251 {
1253 }
1254 }
1256 }
1257
1258 std::set<index> bnd_main_nodes;
1259 for (index iNode = 0; iNode < NumNode(); iNode++)
1260 {
1261 for (auto iCell : node2cell[iNode])
1262 {
1263 DNDS_assert(iCell < NumCellProc() && iCell >= 0);
1264 for (auto iNodeOther : cell2node[iCell])
1265 {
1267 if (iNodeOther >= NumNode())
1268 bnd_main_nodes.insert(iNode);
1269 }
1270 }
1271 }
1272 std::map<index, int> bnd_main_nodes_adj_ghost_num;
1273 for (index iNode : bnd_main_nodes)
1275 for (index iNode = NumNode(); iNode < NumNodeProc(); iNode++)
1276 for (auto iCell : node2cell[iNode])
1277 if (iCell >= 0)
1278 for (auto iNodeOther : cell2node[iCell])
1279 if (iNodeOther >= 0 && iNodeOther < NumNode())
1283 }
1284
1286 {
1289 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
1290
1291 cell2cellFace.InitPair("cell2cellFace", mpi);
1292 cell2cellFace.father->Resize(this->NumCell());
1293 for (index iCell = 0; iCell < this->NumCell(); iCell++)
1294 {
1296 for (rowsize ic2f = 0; ic2f < cell2face[iCell].size(); ic2f++)
1297 {
1301 cell2cellFace[iCell][ic2f] = this->CellIndexLocal2Global(iCellOther);
1302 }
1303 }
1304 cell2cellFace.father->Compress();
1306 cell2cellFace.trans.BorrowGGIndexing(cell2node.trans);
1307 cell2cellFace.trans.createMPITypes();
1308 cell2cellFace.trans.pullOnce(); // warning! to pull the adj, must be in global state!
1310 }
1311
1313 {
1314 // needs results of BuildGhostPrimary()
1316 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
1317
1318 /**********************************/
1320 [&](index v) { return CellIndexLocal2Global(v); });
1321 /**********************************/
1322
1324 }
1325
1327 {
1328 // needs results of BuildGhostPrimary()
1330 DNDS_assert_info(cellElemInfo.trans.pLGhostMapping, "trans of cellElemInfo needed but not built");
1331
1332 /**********************************/
1334 [&](index v) { return CellIndexGlobal2Local(v); });
1335 /**********************************/
1336
1338 }
1339
1340 // =================================================================
1341 // File-local helpers for InterpolateFace
1342 // =================================================================
1343 namespace
1344 {
1345 /**
1346 * \brief Result of face enumeration from cell connectivity.
1347 */
1348 struct FaceEnumerationResult
1349 {
1350 std::vector<std::vector<DNDS::index>> face2nodeV;
1351 std::vector<std::vector<NodePeriodicBits>> face2nodePbiV;
1352 std::vector<std::pair<DNDS::index, DNDS::index>> face2cellV;
1353 std::vector<ElemInfo> faceElemInfoV;
1355 };
1356
1357 /**
1358 * \brief Enumerate unique faces from cell-to-node connectivity.
1359 *
1360 * Iterates over all cells (local + ghost), extracts topological faces,
1361 * deduplicates by sorted vertex comparison (with periodic-aware matching
1362 * when \p isPeriodic is true), and populates \p cell2face entries.
1363 *
1364 * \param[in,out] cell2face Cell-to-face pair (rows resized and entries set).
1365 * \param[in] cellElemInfo Cell element info (for element type).
1366 * \param[in] cell2node Cell-to-node connectivity.
1367 * \param[in] cell2nodePbi Cell-to-node periodic bits (used when isPeriodic).
1368 * \param[in] cell2cellSize Total number of cells (father + son).
1369 * \param[in] coordsSize Total number of nodes (father + son), for node2face sizing.
1370 * \param[in] isPeriodic Whether periodic mesh handling is enabled.
1371 */
1372 FaceEnumerationResult EnumerateFacesFromCells(
1373 tAdjPair &cell2face,
1374 const tElemInfoArrayPair &cellElemInfo,
1375 const tAdjPair &cell2node,
1376 const tPbiPair &cell2nodePbi,
1377 DNDS::index cell2cellSize,
1378 DNDS::index coordsSize,
1379 bool isPeriodic)
1380 {
1381 FaceEnumerationResult result;
1382 auto &face2nodeV = result.face2nodeV;
1383 auto &face2nodePbiV = result.face2nodePbiV;
1384 auto &face2cellV = result.face2cellV;
1385 auto &faceElemInfoV = result.faceElemInfoV;
1386 auto &nFaces = result.nFaces;
1387
1388 std::vector<std::vector<DNDS::index>> node2face(coordsSize);
1389
1390 using idx_pbi_pair = std::pair<index, NodePeriodicBits>;
1391 auto idx_pbi_pair_less = [](idx_pbi_pair &L, idx_pbi_pair &R)
1392 { return L.first == R.first ? uint8_t(L.second) < uint8_t(R.second) : L.first < R.first; };
1393 auto idx_pbi_pair_eq = [](idx_pbi_pair &L, idx_pbi_pair &R)
1394 { return L.first == R.first && uint8_t(L.second) == uint8_t(R.second); };
1395
1396 for (DNDS::index iCell = 0; iCell < cell2cellSize; iCell++)
1397 {
1398 auto eCell = Elem::Element{cellElemInfo[iCell]->getElemType()};
1399 cell2face.ResizeRow(iCell, eCell.GetNumFaces());
1400 for (int ic2f = 0; ic2f < eCell.GetNumFaces(); ic2f++)
1401 {
1402 auto eFace = eCell.ObtainFace(ic2f);
1403 std::vector<DNDS::index> faceNodes(eFace.GetNumNodes());
1404 eCell.ExtractFaceNodes(ic2f, cell2node[iCell], faceNodes);
1405 DNDS::index iFound = -1;
1406 std::vector<DNDS::index> faceVerts(faceNodes.begin(), faceNodes.begin() + eFace.GetNumVertices());
1407 std::sort(faceVerts.begin(), faceVerts.end());
1408
1409 std::vector<NodePeriodicBits> faceNodePeriodicBits;
1410 std::vector<idx_pbi_pair> faceNodeNodePeriodicBits;
1411 if (isPeriodic)
1412 {
1413 faceNodePeriodicBits.resize(eFace.GetNumNodes());
1414 faceNodeNodePeriodicBits.resize(eFace.GetNumNodes());
1415 eCell.ExtractFaceNodes(ic2f, cell2nodePbi[iCell], faceNodePeriodicBits);
1416 for (size_t i = 0; i < faceNodeNodePeriodicBits.size(); i++)
1417 faceNodeNodePeriodicBits[i].first = faceNodes[i], faceNodeNodePeriodicBits[i].second = faceNodePeriodicBits[i];
1418 std::sort(faceNodeNodePeriodicBits.begin(), faceNodeNodePeriodicBits.end(),
1419 idx_pbi_pair_less);
1420 for (size_t i = 1; i < faceNodeNodePeriodicBits.size(); i++)
1421 {
1422 DNDS_assert_info(!idx_pbi_pair_eq(faceNodeNodePeriodicBits[i - 1], faceNodeNodePeriodicBits[i]),
1423 "the face has identical (periodic) nodes");
1424 }
1425 }
1426
1427 for (auto iV : faceVerts)
1428 if (iFound < 0)
1429 for (auto iFOther : node2face[iV])
1430 {
1431 auto eFaceOther = Elem::Element{faceElemInfoV[iFOther].getElemType()};
1432 if (eFaceOther.type != eFace.type)
1433 continue;
1434 std::vector<DNDS::index> faceVertsOther(
1435 face2nodeV[iFOther].begin(),
1436 face2nodeV[iFOther].begin() + eFace.GetNumVertices());
1437 std::sort(faceVertsOther.begin(), faceVertsOther.end());
1438 if (std::equal(faceVerts.begin(), faceVerts.end(), faceVertsOther.begin(), faceVertsOther.end()))
1439 {
1440 if (isPeriodic)
1441 {
1442 std::vector<std::pair<index, NodePeriodicBits>> faceNodeNodePeriodicBitsOther(eFaceOther.GetNumNodes());
1443 for (size_t i = 0; i < faceNodeNodePeriodicBitsOther.size(); i++)
1444 faceNodeNodePeriodicBitsOther[i].first = face2nodeV[iFOther][i],
1445 faceNodeNodePeriodicBitsOther[i].second = face2nodePbiV[iFOther][i];
1446 std::sort(faceNodeNodePeriodicBitsOther.begin(), faceNodeNodePeriodicBitsOther.end(),
1447 idx_pbi_pair_less);
1448 auto v0 = faceNodeNodePeriodicBits.at(0).second ^ faceNodeNodePeriodicBitsOther.at(0).second;
1449 DNDS_assert(faceNodeNodePeriodicBitsOther.size() == faceNodeNodePeriodicBits.size());
1450 bool collaborating = true;
1451 for (size_t i = 1; i < faceNodeNodePeriodicBitsOther.size(); i++)
1452 if ((faceNodeNodePeriodicBits[i].second ^ faceNodeNodePeriodicBitsOther[i].second) != v0)
1453 collaborating = false;
1454 if (collaborating)
1455 iFound = iFOther;
1456 }
1457 else
1458 iFound = iFOther;
1459 }
1460 }
1461 if (iFound < 0)
1462 {
1463 // * face not existent yet
1464 face2nodeV.emplace_back(faceNodes); // note: faceVerts invalid here!
1465 if (isPeriodic)
1466 face2nodePbiV.emplace_back(faceNodePeriodicBits);
1467 face2cellV.emplace_back(std::make_pair(iCell, DNDS::UnInitIndex));
1468 // important note: f2nPbi node pbi is always same as cell f2c[0]'s corresponding nodes
1469 faceElemInfoV.emplace_back(ElemInfo{eFace.type, 0});
1470 for (auto iV : faceVerts)
1471 node2face[iV].push_back(nFaces);
1472 cell2face(iCell, ic2f) = nFaces;
1473 nFaces++;
1474 }
1475 else
1476 {
1477 DNDS_assert(face2cellV[iFound].second == DNDS::UnInitIndex);
1478 face2cellV[iFound].second = iCell;
1479 cell2face(iCell, ic2f) = iFound;
1480 }
1481 }
1482 }
1483 return result;
1484 }
1485
1486 /**
1487 * \brief Result of face collection/filtering pass.
1488 */
1489 struct FaceCollectionResult
1490 {
1491 std::vector<index> iFaceAllToCollected; ///< mapping: old face idx -> collected idx (or UnInitIndex/-1)
1492 std::vector<std::vector<index>> faceSendLocals; ///< per-rank lists of local face indices to send as ghost
1494 };
1495
1496 /**
1497 * \brief Filter faces: discard ghost-only and duplicate cross-rank faces,
1498 * assign ownership to the rank with the lower rank ID.
1499 *
1500 * \param[in] faceElemInfoV Per-face element info (zone for internal/bnd distinction).
1501 * \param[in] face2cellV Per-face cell pair (first, second).
1502 * \param[in] nFaces Total enumerated faces.
1503 * \param[in] nLocalCells Number of local (father) cells.
1504 * \param[in] pLGhostMapping Ghost mapping from cell2node.trans (for global index lookup).
1505 * \param[in] pLGlobalMapping Global mapping from cell2node.father (for rank search).
1506 * \param[in] nMPIRanks Number of MPI ranks.
1507 * \param[in] myRank This process's MPI rank.
1508 */
1509 FaceCollectionResult CollectFaces(
1510 const std::vector<ElemInfo> &faceElemInfoV,
1511 const std::vector<std::pair<DNDS::index, DNDS::index>> &face2cellV,
1513 DNDS::index nLocalCells,
1514 const DNDS::OffsetAscendIndexMapping &ghostMapping,
1515 const DNDS::GlobalOffsetsMapping &globalMapping,
1516 DNDS::MPI_int nMPIRanks,
1517 DNDS::MPI_int myRank)
1518 {
1519 FaceCollectionResult result;
1520 result.iFaceAllToCollected.resize(nFaces);
1521 result.faceSendLocals.resize(nMPIRanks);
1522 result.nFacesNew = 0;
1523
1524 for (index iFace = 0; iFace < nFaces; iFace++)
1525 {
1526 if (faceElemInfoV[iFace].zone <= 0) // if internal
1527 {
1528 if (face2cellV[iFace].second == UnInitIndex && face2cellV[iFace].first >= nLocalCells) // has not other cell with ghost parent
1529 result.iFaceAllToCollected[iFace] = UnInitIndex; // * discard
1530 else if (face2cellV[iFace].first >= nLocalCells &&
1531 face2cellV[iFace].second >= nLocalCells) // both sides ghost
1532 result.iFaceAllToCollected[iFace] = UnInitIndex; // * discard
1533 else if (face2cellV[iFace].first >= nLocalCells ||
1534 face2cellV[iFace].second >= nLocalCells)
1535 {
1536 DNDS_assert(face2cellV[iFace].second >= nLocalCells); // should only be the internal as first
 1537 // * check both sides' info //TODO: optimize so that pLGhostMapping returns rank directly ?
1538 index cellGlobL = ghostMapping(-1, face2cellV[iFace].first);
1539 index cellGlobR = ghostMapping(-1, face2cellV[iFace].second);
1540 MPI_int rankL, rankR;
1541 index valL, valR;
1542 auto retL = globalMapping.search(cellGlobL, rankL, valL);
1543 auto retR = globalMapping.search(cellGlobR, rankR, valR);
1544 DNDS_assert(retL && retR && (rankL != rankR));
1545 if (rankL > rankR)
1546 {
1547 result.iFaceAllToCollected[iFace] = -1; // * discard but with ghost
1548 }
1549 else
1550 {
1551 DNDS_assert(rankL == myRank);
1552 result.faceSendLocals[rankR].push_back(result.nFacesNew);
1553 result.iFaceAllToCollected[iFace] = result.nFacesNew++; //*use
1554 }
1555 }
1556 else
1557 {
1558 result.iFaceAllToCollected[iFace] = result.nFacesNew++; //*use
1559 }
1560 }
1561 else // all bnds would be non duplicate
1562 {
1563 result.iFaceAllToCollected[iFace] = result.nFacesNew++; //*use
1564 }
1565 }
1566 return result;
1567 }
1568
1569 /**
1570 * \brief Copy collected (non-discarded) face data from temporary vectors
1571 * into the resized face member arrays, then remap cell2face entries.
1572 *
1573 * \param[in] faceEnum Face enumeration result (temp vectors).
1574 * \param[in] faceCollect Face collection result (mapping + nFacesNew).
1575 * \param[out] face2cell Face-to-cell pair (father resized and filled).
1576 * \param[out] face2node Face-to-node pair (father resized and filled).
1577 * \param[out] face2nodePbi Face-to-node periodic bits pair (if periodic).
1578 * \param[out] faceElemInfo Face element info pair (father resized and filled).
1579 * \param[in,out] cell2face Cell-to-face pair (entries remapped via iFaceAllToCollected).
1580 * \param[in] isPeriodic Whether periodic mesh handling is enabled.
1581 * \param[in] mpiComm MPI communicator for barrier.
1582 */
1583 void CompactFacesAndRemapCell2Face(
1584 const FaceEnumerationResult &faceEnum,
1585 const FaceCollectionResult &faceCollect,
1586 tAdj2Pair &face2cell,
1587 tAdjPair &face2node,
1588 tPbiPair &face2nodePbi,
1589 tElemInfoArrayPair &faceElemInfo,
1590 tAdjPair &cell2face,
1591 bool isPeriodic,
1592 MPI_Comm mpiComm)
1593 {
1594 const auto &face2nodeV = faceEnum.face2nodeV;
1595 const auto &face2nodePbiV = faceEnum.face2nodePbiV;
1596 const auto &face2cellV = faceEnum.face2cellV;
1597 const auto &faceElemInfoV = faceEnum.faceElemInfoV;
1598 const auto &iFaceAllToCollected = faceCollect.iFaceAllToCollected;
1599 index nFacesNew = faceCollect.nFacesNew;
1600 index nFaces = faceEnum.nFaces;
1601
1602 face2cell.father->Resize(nFacesNew);
1603 face2node.father->Resize(nFacesNew);
1604 if (isPeriodic)
1605 face2nodePbi.father->Resize(nFacesNew);
1606 faceElemInfo.father->Resize(nFacesNew); //! considering globally duplicate faces
1607 nFacesNew = 0;
1608 for (DNDS::index iFace = 0; iFace < nFaces; iFace++)
1609 {
1610 if (iFaceAllToCollected[iFace] >= 0) // ! -1 is also ignored!
1611 {
1612 face2node.ResizeRow(nFacesNew, face2nodeV[iFace].size());
1613 for (DNDS::rowsize if2n = 0; if2n < face2node.RowSize(nFacesNew); if2n++)
1614 face2node(nFacesNew, if2n) = face2nodeV[iFace][if2n];
1615 if (isPeriodic)
1616 {
1617 DNDS_assert(face2nodeV[iFace].size() == face2nodePbiV[iFace].size());
1618 face2nodePbi.ResizeRow(nFacesNew, face2nodePbiV[iFace].size());
1619 for (DNDS::rowsize if2n = 0; if2n < face2nodePbi.RowSize(nFacesNew); if2n++)
1620 face2nodePbi(nFacesNew, if2n) = face2nodePbiV[iFace][if2n];
1621 }
1622 face2cell(nFacesNew, 0) = face2cellV[iFace].first;
1623 face2cell(nFacesNew, 1) = face2cellV[iFace].second;
1624 faceElemInfo(nFacesNew, 0) = faceElemInfoV[iFace];
1625 nFacesNew++;
1626 }
1627 }
1628
1629 MPI::Barrier(mpiComm);
1630#ifdef DNDS_USE_OMP
1631# pragma omp parallel for
1632#endif
1633 for (DNDS::index iCell = 0; iCell < cell2face.Size(); iCell++) // convert face indices pointers
1634 {
1635 for (rowsize ic2f = 0; ic2f < cell2face.RowSize(iCell); ic2f++)
1636 {
1637 cell2face(iCell, ic2f) = iFaceAllToCollected[cell2face(iCell, ic2f)]; // Uninit if to discard
1638 }
1639 }
1640 }
1641
1642 /**
1643 * \brief Match boundary elements to faces, populating faceElemInfo zone IDs
1644 * and building bnd2face / face2bnd mappings.
1645 *
1646 * For each boundary, finds the face that shares the same node set via
1647 * the parent cell's cell2face connectivity.
1648 */
1649 void MatchBoundariesToFaces(
1650 const tElemInfoArrayPair &bndElemInfo,
1651 const tAdj2Pair &bnd2cell,
1652 const tAdjPair &bnd2node,
1653 const tAdjPair &cell2face,
1654 const tAdjPair &face2node,
1655 const tAdjPair &cell2node,
1656 tElemInfoArrayPair &faceElemInfo,
1657 std::vector<index> &bnd2faceV,
1658 std::unordered_map<index, index> &face2bndM,
1659 tAdj1Pair &face2bnd,
1660 tAdj1Pair &bnd2face)
1661 {
1662 bnd2faceV.resize(bndElemInfo.father->Size(), -1); // this mapping only uses main (father) part
1663 face2bndM.reserve(bndElemInfo.father->Size());
1664 std::unordered_map<index, index> iFace2iBnd;
1665 for (DNDS::index iBnd = 0; iBnd < bndElemInfo.father->Size(); iBnd++)
1666 {
1667 DNDS::index pCell = bnd2cell(iBnd, 0);
1668 std::vector<DNDS::index> b2nRow = bnd2node[iBnd];
1669 std::sort(b2nRow.begin(), b2nRow.end());
1670 int nFound = 0;
1671 auto faceID = bndElemInfo[iBnd]->zone;
1672 for (int ic2f = 0; ic2f < cell2face.RowSize(pCell); ic2f++)
1673 {
1674 auto iFace = cell2face(pCell, ic2f);
1675 if (iFace < 0) //==-1, pointing to ghost face
1676 continue;
1677 std::vector<DNDS::index> f2nRow = face2node[iFace];
1678 std::sort(f2nRow.begin(), f2nRow.end());
1679 if (std::equal(b2nRow.begin(), b2nRow.end(), f2nRow.begin(), f2nRow.end()))
1680 {
1681 if (iFace2iBnd.count(iFace))
1682 {
1683 DNDS_assert(FaceIDIsPeriodic(faceID)); // only periodic gets to be duplicated
1684 index iBndOther = iFace2iBnd[iFace];
1685 index iCellA = bnd2cell[iBnd][0];
1686 index iCellB = bnd2cell[iBndOther][0];
1687 DNDS_assert(iCellA < cell2node.father->Size()); // both points to local non-ghost cells
1688 DNDS_assert(iCellB < cell2node.father->Size()); // both points to local non-ghost cells
1689 // std::cout << iCellA << " vs " << iCellB << std::endl;
1690 if (iCellA > iCellB) // make face corresponds with f2c[0]'s bnd if possible
1691 continue;
1692 }
1693 iFace2iBnd[iFace] = iBnd;
1694 nFound++; // two things:
1695 // if is periodic, then only gets the bnd info of the main cell's bnd;
1696 // if is external bc, then must be non-ghost face
1697 faceElemInfo(iFace, 0) = bndElemInfo(iBnd, 0);
1698 bnd2faceV[iBnd] = iFace;
1699 face2bndM[iFace] = iBnd;
1701 FaceIDIsPeriodic(faceID),
1702 "bnd elem should have a BC id not interior");
1703 }
1704 }
1705 DNDS_assert(nFound > 0 || (FaceIDIsPeriodic(faceID) && nFound == 0)); // periodic could miss the face
1706 }
1707
1708 face2bnd.father->Resize(face2node.father->Size());
1709 for (index iFace = 0; iFace < face2bnd.father->Size(); iFace++)
1710 face2bnd.father->operator()(iFace) = face2bndM.count(iFace) ? face2bndM[iFace] : UnInitIndex;
1711 bnd2face.father->Resize(bnd2node.father->Size());
1712 for (index iBnd = 0; iBnd < bnd2node.father->Size(); iBnd++)
1713 bnd2face.father->operator()(iBnd) = bnd2faceV.at(iBnd);
1714 }
1715
1716 /**
1717 * \brief Match received ghost faces to cells and assign cell2face entries
1718 * that were previously marked -1 (pointing to ghost).
1719 *
1720 * For each ghost face, iterates over both its cells, finds the matching
1721 * topological face slot in cell2face, and assigns the ghost face index.
1722 */
1723 void AssignGhostFacesToCells(
1724 const tAdj2 &face2cellSon,
1725 const tAdj &face2nodeSon,
1726 const tElemInfoArray &faceElemInfoSon,
1727 tAdjPair &cell2face,
1728 const tAdjPair &cell2node,
1729 const tElemInfoArrayPair &cellElemInfo,
1730 DNDS::index nFatherFaces)
1731 {
1732#ifdef DNDS_USE_OMP
1733# pragma omp parallel for
1734#endif
1735 for (DNDS::index iFace = 0; iFace < face2cellSon->Size(); iFace++) // face2cell points to local now
1736 {
1737 // before: first points to inner, //!relies on the order of setting face2cell
1738 DNDS_assert((*face2cellSon)(iFace, 0) >= cell2node.father->Size());
1739 auto eFace = Elem::Element{(*faceElemInfoSon)(iFace, 0).getElemType()};
1740 auto faceVerts = std::vector<index>((*face2nodeSon)[iFace].begin(), (*face2nodeSon)[iFace].begin() + eFace.GetNumVertices());
1741 std::sort(faceVerts.begin(), faceVerts.end()); //* do not forget to do set operation sort first
1742 for (rowsize if2c = 0; if2c < 2; if2c++)
1743 {
1744 index iCell = (*face2cellSon)(iFace, if2c);
1745 auto cell2faceRow = cell2face[iCell];
1746 auto cellNodes = cell2node[iCell];
1747 auto eCell = Elem::Element{cellElemInfo(iCell, 0).getElemType()};
1748 bool found = false;
1749 for (rowsize ic2f = 0; ic2f < cell2face.RowSize(iCell); ic2f++)
1750 {
1751 auto eFace = eCell.ObtainFace(ic2f);
1752 std::vector<index> faceNodesC(eFace.GetNumNodes());
1753 eCell.ExtractFaceNodes(ic2f, cellNodes, faceNodesC);
1754 std::sort(faceNodesC.begin(), faceNodesC.end());
1755 if (std::includes(faceNodesC.begin(), faceNodesC.end(), faceVerts.begin(), faceVerts.end()))
1756 {
1757 DNDS_assert(cell2face(iCell, ic2f) == -1);
1758 cell2face(iCell, ic2f) = iFace + nFatherFaces; // remember is ghost
1759 found = true;
1760 }
1761 }
1762 DNDS_assert(found);
1763 }
1764 }
1765 }
1766 } // anonymous namespace
1767
 1768 /// @todo Handle periodic cases.
1771 {
1772 DNDS_assert(adjPrimaryState == Adj_PointToLocal); // And also should have primary ghost comm
1773
1774 // Allocate face-related array pairs
1775 cell2face.InitPair("cell2face", mpi);
1776 face2cell.InitPair("face2cell", mpi);
1777 face2node.InitPair("face2node", mpi);
1778 if (isPeriodic)
1779 face2nodePbi.InitPair("face2nodePbi", mpi);
1780 faceElemInfo.InitPair("faceElemInfo", mpi);
1781 face2bnd.InitPair("face2bnd", mpi);
1782 bnd2face.InitPair("bnd2face", mpi);
1783
1784 cell2face.father->Resize(cell2cell.father->Size());
1785 cell2face.son->Resize(cell2cell.son->Size());
1786
1787 // Section B: Enumerate unique faces from cell connectivity
1791
1792 // Section C: Filter faces — discard ghost-only and duplicate cross-rank
1794 faceEnum.faceElemInfoV, faceEnum.face2cellV, faceEnum.nFaces,
1795 cell2face.father->Size(),
1796 *cell2node.trans.pLGhostMapping, *cell2node.father->pLGlobalMapping,
1797 mpi.size, mpi.rank);
1798
1799 // Section D: Compact collected faces into member arrays, remap cell2face
1805
1806 // Section E: Match boundary elements to faces
1810
1811 // Section F: Ghost face communication
1812 this->AdjLocal2GlobalFacial();
1813
1814 // Flatten faceSendLocals into CSR format
1815 std::vector<index> faceSendLocalsIdx;
1816 std::vector<index> faceSendLocalsStarts(mpi.size + 1);
1817 faceSendLocalsStarts[0] = 0;
1818 for (MPI_int r = 0; r < mpi.size; r++)
1819 faceSendLocalsStarts[r + 1] = faceSendLocalsStarts[r] + faceCollect.faceSendLocals[r].size();
1821 for (MPI_int r = 0; r < mpi.size; r++)
1822 std::copy(faceCollect.faceSendLocals[r].begin(), faceCollect.faceSendLocals[r].end(),
1824
1825 face2node.father->Compress(); // before comm
1826 if (isPeriodic)
1827 face2nodePbi.father->Compress();
1829 face2cell.trans.createFatherGlobalMapping();
1831 face2cell.trans.createMPITypes();
1832 face2cell.trans.pullOnce();
1834 if (isPeriodic)
1838
1839 this->AdjGlobal2LocalFacial();
1840
1841 // Section G: Assign ghost faces to cell2face entries marked -1
1843 face2cell.son, face2node.son, faceElemInfo.son,
1845 face2cell.father->Size());
1846
1847 cell2face.father->Compress();
1848 cell2face.son->Compress();
1850
1851 // Section H: Communicate cell2face and bnd2face ghost data
1852 this->AdjLocal2GlobalC2F();
1853 cell2face.TransAttach();
1854 cell2face.trans.BorrowGGIndexing(cell2node.trans);
1855 cell2face.trans.createMPITypes();
1856 cell2face.trans.pullOnce();
1858 bnd2face.trans.BorrowGGIndexing(bnd2node.trans);
1859 bnd2face.trans.createMPITypes();
1860 bnd2face.trans.pullOnce();
1861 this->AdjGlobal2LocalC2F();
1862
1863 for (DNDS::index iFace = 0; iFace < faceElemInfo.Size(); iFace++)
1864 {
1866 {
1867 // DNDS_assert(false);
1868 }
1869 }
1870 for (DNDS::index iFace = 0; iFace < faceElemInfo.Size(); iFace++)
1871 {
1873 {
1874 // DNDS_assert(false);
1875 }
1876 }
1877
1878 auto gSize = face2node.father->globalSize(); //! sync call!!!
1879 if (mpi.rank == 0)
1880 log() << "UnstructuredMesh === InterpolateFace: total faces " << gSize << std::endl;
1881 }
1882
1885 {
1886
1887 //* some assertions on faces
1888 std::vector<uint16_t> cCont(cell2cell.Size(), 0); // simulate flux
1889 for (DNDS::index iFace = 0; iFace < faceElemInfo.Size(); iFace++)
1890 {
1891 auto faceID = faceElemInfo(iFace, 0).zone;
1893 {
1894 // if (FaceIDIsPeriodic(faceID))
1895 // {
1896 // // TODO: tend to the case of face is PeriodicDonor with Main in same proc
1897 // continue;
1898 // }
1899 // if (face2cell[iFace][0] < cell2cell.father->Size()) // other side prime cell, periodic also
1901 fmt::format(
1902 "Face {} is internal, but f2c[1] is null, at {},{},{} - {},{},{}", iFace,
1903 coords[face2node[iFace][0]](0),
1904 coords[face2node[iFace][0]](1),
1905 coords[face2node[iFace][0]](2),
1906 face2node[iFace].size() > 1 ? coords[face2node[iFace][1]](0) : 0.,
1907 face2node[iFace].size() > 1 ? coords[face2node[iFace][1]](1) : 0.,
1908 face2node[iFace].size() > 1 ? coords[face2node[iFace][1]](2) : 0.)); // Assert has enough cell donors
1911 cCont[face2cell[iFace][0]]++;
1912 cCont[face2cell[iFace][1]]++;
1913 }
1914 else // a external BC
1915 {
1917 DNDS_assert(face2cell[iFace][0] >= 0 && face2cell[iFace][0] < cell2cell.father->Size());
1918 cCont[face2cell[iFace][0]]++;
1919 }
1920 }
1921 for (DNDS::index iCell = 0; iCell < cellElemInfo.father->Size(); iCell++) // for every non-ghost
1922 {
1923 for (auto iFace : cell2face[iCell])
1924 {
1925 DNDS_assert(iFace >= 0 && iFace < face2cell.Size());
1927 }
1929 }
1930 }
1931
1934 {
1935 // TODO: Make these read/write of mesh independent of ghost part!
1937
1938 auto cwd = serializerP->GetCurrentPath();
1939 serializerP->CreatePath(name);
1940 serializerP->GoToPath(name);
1941
1942 serializerP->WriteString("mesh", "UnstructuredMesh");
1943 serializerP->WriteIndex("dim", dim);
1944 if (serializerP->IsPerRank())
1945 serializerP->WriteIndex("MPIRank", mpi.rank);
1946 serializerP->WriteIndex("MPISize", mpi.size);
1947 serializerP->WriteInt("isPeriodic", isPeriodic);
1948
1950 cell2node.WriteSerialize(serializerP, "cell2node");
1951 // cell2cell.WriteSerialize(serializerP, "cell2cell");
1952 cellElemInfo.WriteSerialize(serializerP, "cellElemInfo");
1953 bnd2node.WriteSerialize(serializerP, "bnd2node");
1954 // bnd2cell.WriteSerialize(serializerP, "bnd2cell");
1955 bndElemInfo.WriteSerialize(serializerP, "bndElemInfo");
1956 if (isPeriodic)
1957 {
1958 cell2nodePbi.WriteSerialize(serializerP, "cell2nodePbi");
1959 bnd2nodePbi.WriteSerialize(serializerP, "bnd2nodePbi");
1960 periodicInfo.WriteSerializer(serializerP, "periodicInfo");
1961 }
1962 bnd2bndOrig.WriteSerialize(serializerP, "bnd2bndOrig");
1963 cell2cellOrig.WriteSerialize(serializerP, "cell2cellOrig");
1964 node2nodeOrig.WriteSerialize(serializerP, "node2nodeOrig");
1965
1966 serializerP->GoToPath(cwd);
1967 }
1968
1971 {
1972 auto cwd = serializerP->GetCurrentPath();
1973 // serializerP->CreatePath(name);//! remember no create!
1974 serializerP->GoToPath(name);
1975
1976 std::string meshRead;
1977 index dimRead{0}, rankRead{0}, sizeRead{0};
1978 int isPeriodicRead;
1979 serializerP->ReadString("mesh", meshRead);
1980 serializerP->ReadIndex("dim", dimRead);
1981 if (serializerP->IsPerRank())
1982 serializerP->ReadIndex("MPIRank", rankRead);
1983 serializerP->ReadIndex("MPISize", sizeRead);
1984 serializerP->ReadInt("isPeriodic", isPeriodicRead);
1986 DNDS_assert(meshRead == "UnstructuredMesh");
1988 DNDS_assert((!serializerP->IsPerRank() || rankRead == mpi.rank) && sizeRead == mpi.size);
1989
1990 // make the empty arrays
1991 coords.InitPair("coords", getMPI());
1992 cellElemInfo.InitPair("cellElemInfo", getMPI());
1993 bndElemInfo.InitPair("bndElemInfo", getMPI());
1994 cell2node.InitPair("cell2node", getMPI());
1995 if (isPeriodic)
1996 {
1997 cell2nodePbi.InitPair("cell2nodePbi", getMPI());
1998 bnd2nodePbi.InitPair("bnd2nodePbi", getMPI());
1999 }
2000 bnd2node.InitPair("bnd2node", getMPI());
2001
2002 bnd2bndOrig.InitPair("bnd2bndOrig", getMPI());
2003 cell2cellOrig.InitPair("cell2cellOrig", getMPI());
2004 node2nodeOrig.InitPair("node2nodeOrig", getMPI());
2005
2006 coords.ReadSerialize(serializerP, "coords");
2007 cell2node.ReadSerialize(serializerP, "cell2node");
2008 // cell2cell.ReadSerialize(serializerP, "cell2cell");
2009 cellElemInfo.ReadSerialize(serializerP, "cellElemInfo");
2010 bnd2node.ReadSerialize(serializerP, "bnd2node");
2011 // bnd2cell.ReadSerialize(serializerP, "bnd2cell");
2012 bndElemInfo.ReadSerialize(serializerP, "bndElemInfo");
2013 if (isPeriodic)
2014 {
2015 cell2nodePbi.ReadSerialize(serializerP, "cell2nodePbi");
2016 bnd2nodePbi.ReadSerialize(serializerP, "bnd2nodePbi");
2017 periodicInfo.ReadSerializer(serializerP, "periodicInfo");
2018 }
2019 bnd2bndOrig.ReadSerialize(serializerP, "bnd2bndOrig");
2020 cell2cellOrig.ReadSerialize(serializerP, "cell2cellOrig");
2021 node2nodeOrig.ReadSerialize(serializerP, "node2nodeOrig");
2022
2023 // after matters:
2024 coords.trans.createMPITypes();
2025 cell2node.trans.createMPITypes();
2026 // cell2cell.trans.createMPITypes();
2027 cellElemInfo.trans.createMPITypes();
2028 bnd2node.trans.createMPITypes();
2029 // bnd2cell.trans.createMPITypes();
2030 bndElemInfo.trans.createMPITypes();
2031 if (isPeriodic)
2032 {
2033 cell2nodePbi.trans.createMPITypes();
2034 bnd2nodePbi.trans.createMPITypes();
2035 }
2036 bnd2bndOrig.trans.createMPITypes();
2037 cell2cellOrig.trans.createMPITypes();
2038 node2nodeOrig.trans.createMPITypes();
2039 adjPrimaryState = Adj_PointToGlobal; // the file is pointing to local
2040
2041 index nCellG = this->NumCellGlobal(); // collective call!
2042 index nNodeG = this->NumNodeGlobal(); // collective call!
2043 index nNodeB = this->NumBndGlobal(); // collective call!
2044 if (mpi.rank == mRank)
2045 {
2046 log() << "UnstructuredMesh === ReadSerialize "
2047 << "Global NumCell [ " << nCellG << " ]" << std::endl;
2048 log() << "UnstructuredMesh === ReadSerialize "
2049 << "Global NumNode [ " << nNodeG << " ]" << std::endl;
2050 log() << "UnstructuredMesh === ReadSerialize "
2051 << "Global NumBnd [ " << nNodeB << " ]" << std::endl;
2052 }
2053 MPISerialDo(mpi, [&]()
2054 { log() << " Rank: " << mpi.rank << " nCell " << this->NumCell() << " nCellGhost " << this->NumCellGhost() << std::endl; });
2055 MPISerialDo(mpi, [&]()
2056 { log() << " Rank: " << mpi.rank << " nNode " << this->NumNode() << " nNodeGhost " << this->NumNodeGhost() << std::endl; });
2057
2058 serializerP->GoToPath(cwd);
2059 }
2060
2062 {
2063 DNDS_assert(bMesh.dim == dim - 1 && bMesh.mpi == mpi);
2064 bMesh.cell2node.InitPair("bMesh.cell2node", mpi);
2065 bMesh.coords.InitPair("bMesh.coords", mpi);
2066 if (isPeriodic)
2067 {
2068 bMesh.isPeriodic = true;
2069 bMesh.periodicInfo = this->periodicInfo;
2070 bMesh.cell2nodePbi.InitPair("bMesh.cell2nodePbi", mpi);
2071 }
2072 bMesh.cellElemInfo.InitPair("bMesh.cellElemInfo", mpi);
2073 bMesh.cell2cellOrig.InitPair("bMesh.cell2cellOrig", mpi);
2074 bMesh.node2nodeOrig.InitPair("bMesh.node2nodeOrig", mpi);
2075
2076 bMesh.bnd2cell.InitPair("bMesh.bnd2cell", mpi); // which will remain 0 sized
2077 bMesh.bnd2node.InitPair("bMesh.bnd2node", mpi); // which will remain 0 sized
2078 bMesh.bndElemInfo.InitPair("bMesh.bndElemInfo", mpi); // which will remain 0 sized
2079 bMesh.bnd2bndOrig.InitPair("bMesh.bnd2bndOrig", mpi); // which will remain 0 sized
2080 if (isPeriodic)
2081 bMesh.bnd2nodePbi.InitPair("bMesh.bnd2nodePbi", mpi); // which will remain 0 sized
2082
2084 node2bndNodeGlobal.InitPair("node2bndNodeGlobal", mpi);
2085
2086 node2bndNodeGlobal.father->Resize(this->NumNode());
2087 node2bndNodeGlobal.TransAttach();
2088 node2bndNodeGlobal.trans.BorrowGGIndexing(coords.trans);
2089 node2bndNodeGlobal.trans.createMPITypes();
2091 {
2092 for (index i = 0; i < node2bndNodeGlobal.Size(); i++)
2093 node2bndNodeGlobal[i] = 0;
2094 for (index iBnd = 0; iBnd < this->NumBnd(); iBnd++)
2095 for (auto iNode : bnd2node.father->operator[](iBnd))
2096 node2bndNodeGlobal[iNode]++; // now stores num-reference
2097
2098 {
2099 auto node2bndNodeGlobalPast = make_ssp<tInd::element_type>(ObjName{"node2bndNodeGlobalPast"}, mpi);
2102 node2bndNodeGlobalPastTrans.createFatherGlobalMapping();
2103 std::vector<index> pushSonSeries(node2bndNodeGlobal.son->Size());
2104 for (size_t i = 0; i < pushSonSeries.size(); i++)
2106 node2bndNodeGlobalPastTrans.createGhostMapping(pushSonSeries, node2bndNodeGlobal.trans.pLGhostMapping->ghostStart);
2107 node2bndNodeGlobalPastTrans.createMPITypes();
2108 node2bndNodeGlobalPastTrans.pullOnce();
2109 DNDS_assert(DNDS::size_to_index(node2bndNodeGlobal.trans.pLGhostMapping->ghostIndex.size()) == node2bndNodeGlobal.son->Size());
2110 DNDS_assert(DNDS::size_to_index(node2bndNodeGlobal.trans.pLGhostMapping->pushingIndexGlobal.size()) == node2bndNodeGlobalPast->Size());
2111 for (index i = 0; i < node2bndNodeGlobalPast->Size(); i++)
2112 {
2113 index iNodeG = node2bndNodeGlobal.trans.pLGhostMapping->pushingIndexGlobal[i]; //?should be right
2114 auto [ret, rank, iNodeL] = node2bndNodeGlobal.trans.pLGlobalMapping->search(iNodeG);
2115 DNDS_assert(ret && rank == node2bndNodeGlobal.father->getMPI().rank); // has to be local main
2116 node2bndNodeGlobal[iNodeL] += (*node2bndNodeGlobalPast)[i];
2117 }
2118 }
2119 {
2120 for (index i = 0; i < node2bndNodeGlobal.father->Size(); i++)
2121 if (node2bndNodeGlobal[i] == 0)
2123 else
2125 }
2129 for (index i = 0; i < node2bndNodeGlobal.Size(); i++)
2130 if (node2bndNodeGlobal[i] >= 0)
2132 }
2133 node2bndNodeGlobal.trans.pullOnce();
2134 bMesh.coords.father->Resize(bndNodeCount);
2135 bMesh.coords.TransAttach();
2136 bMesh.coords.trans.createFatherGlobalMapping();
2137 std::vector<index> bndNodePullingSet;
2138 for (index iNode = 0; iNode < this->NumNodeProc(); iNode++)
2139 {
2140 if (node2bndNodeGlobal[iNode] < 0)
2141 continue;
2142 // then node2bndNodeGlobal[iNode] >= 0, which covers all bnd2node entries
2143 auto [ret, rank, iNodeL_in_bnd] = bMesh.coords.trans.pLGlobalMapping->search(node2bndNodeGlobal[iNode]);
2145 if (rank != node2bndNodeGlobal.father->getMPI().rank)
2146 bndNodePullingSet.push_back(node2bndNodeGlobal[iNode]); // bndNodePullingSet now also covers bnd2node needs
2147 }
2148 bMesh.coords.trans.createGhostMapping(bndNodePullingSet);
2149 bMesh.coords.trans.createMPITypes();
2150 bMesh.node2nodeOrig.father->Resize(bndNodeCount);
2151 bMesh.node2nodeOrig.TransAttach();
2152 bMesh.node2nodeOrig.trans.BorrowGGIndexing(bMesh.coords.trans);
2153 bMesh.node2nodeOrig.trans.createMPITypes();
2154
2155 // std::cout << mpi.rank << ", " << bndNodePullingSet.size() << ", " << bndNodeCount << std::endl;
2156 node2bndNode.resize(this->NumNodeProc(), -1);
2157 bMesh.node2parentNode.resize(bMesh.coords.Size());
2158 for (index iN = 0; iN < node2bndNodeGlobal.Size(); iN++)
2159 node2bndNode.at(iN) = bMesh.NodeIndexGlobal2Local(node2bndNodeGlobal[iN]),
2161 for (size_t iNode = 0; iNode < node2bndNode.size(); iNode++)
2162 if (node2bndNode[iNode] >= 0)
2163 bMesh.node2parentNode.at(node2bndNode[iNode]) = iNode;
2164
2165 // std::cout << mpi.rank << ", " << bndNodeCount << std::endl;
2166 for (index iBNode = 0; iBNode < bMesh.coords.Size(); iBNode++)
2167 bMesh.coords[iBNode] = coords[bMesh.node2parentNode[iBNode]];
2168 bMesh.coords.trans.pullOnce(); // excessive, should be identical
2169 for (index iBNode = 0; iBNode < bMesh.node2nodeOrig.Size(); iBNode++)
2170 bMesh.node2nodeOrig[iBNode] = node2nodeOrig[bMesh.node2parentNode[iBNode]];
2171
2172 index nBndCellUse{0};
2173 for (index iB = 0; iB < this->NumBnd(); iB++)
2174 if (!FaceIDIsPeriodic(this->bndElemInfo(iB, 0).zone))
2175 nBndCellUse++;
2177 if (isPeriodic)
2178 bMesh.cell2nodePbi.father->Resize(nBndCellUse);
2179 bMesh.cell2cellOrig.father->Resize(nBndCellUse);
2180 bMesh.cellElemInfo.father->Resize(nBndCellUse);
2181 bMesh.cell2parentCell.resize(nBndCellUse, -1);
2182 nBndCellUse = 0;
2183 for (index iB = 0; iB < this->NumBnd(); iB++)
2184 {
2185 if (FaceIDIsPeriodic(this->bndElemInfo(iB, 0).zone))
2186 continue;
2187 bMesh.cell2parentCell.at(nBndCellUse) = iB;
2189 if (isPeriodic)
2190 bMesh.cell2nodePbi.ResizeRow(nBndCellUse, bnd2node.RowSize(iB));
2191 bMesh.cellElemInfo(nBndCellUse, 0) = bndElemInfo(iB, 0);
2192 bMesh.cell2cellOrig(nBndCellUse, 0) = bnd2bndOrig(iB, 0);
2193
2194 for (rowsize ib2n = 0; ib2n < bnd2node.RowSize(iB); ib2n++)
2195 {
2196 if (bnd2faceV.at(iB) < 0) // where bnd has not a face!
2197 bMesh.cell2node[nBndCellUse][ib2n] = node2bndNode.at(bnd2node[iB][ib2n]);
2198 else
2199 bMesh.cell2node[nBndCellUse][ib2n] = node2bndNode.at(face2node[bnd2faceV.at(iB)][ib2n]); //* respect the face ordering if possible // this can be omitted if all bnds used are not periodic
2201 if (isPeriodic)
2202 {
2203 if (bnd2faceV.at(iB) < 0) // where bnd has not a face!
2204 bMesh.cell2nodePbi[nBndCellUse][ib2n] = Geom::NodePeriodicBits{}; // an invalid value
2205 else
2206 {
2207 bMesh.cell2nodePbi[nBndCellUse][ib2n] = face2nodePbi[bnd2faceV.at(iB)][ib2n];
2208 }
2209 }
2210 }
2211 nBndCellUse++;
2212 }
2213
2214 bMesh.cell2node.father->Compress();
2215 bMesh.cell2node.father->createGlobalMapping();
2216 bMesh.cell2node.TransAttach();
2217 bMesh.cell2node.trans.createGhostMapping(std::vector<int>{}); // now bnd mesh has no valid cell2cell and ghost cells
2218
2219 // Ensure global mappings exist on all arrays that BuildSerialOut
2220 // (and other consumers) may query via globalSize(). The ghost
2221 // mappings were already set up for coords/node2nodeOrig above;
2222 // cellElemInfo and cell2cellOrig only need the global mapping.
2223 bMesh.cellElemInfo.father->createGlobalMapping();
2224 bMesh.cell2cellOrig.father->createGlobalMapping();
2225
2226 bMesh.adjPrimaryState = Adj_PointToLocal;
2227 if (mpi.rank == mRank)
2228 log() << "UnstructuredMesh === ConstructBndMesh Done" << std::endl;
2229 }
2230
2239
2240 // =================================================================
2241 // File-local helpers for ReorderLocalCells
2242 // =================================================================
2243 namespace
2244 {
2245 /**
2246 * \brief Result of local cell permutation computation.
2247 */
2248 struct CellPermutationResult
2249 {
2250 std::vector<index> cellOld2New;
2251 std::vector<index> cellNew2Old;
2252 std::vector<index> localPartitionStarts;
2255 };
2256
2257 /**
2258 * \brief Compute a cell reordering permutation using Metis partitioning
2259 * with optional inner partitioning and contiguous sorting.
2260 *
2261 * 1. Partition via Metis + RCM.
2262 * 2. Optionally sub-partition each first-level partition.
2263 * 3. Within each partition, sort cells so interior (private) cells come
2264 * before cells that touch ghost neighbors.
2265 * 4. Build inverse permutation.
2266 *
2267 * \param[in] cell2cellFaceV Local face-adjacency graph (no ghost edges).
2268 * \param[in] cell2cell Full cell-to-cell adjacency (with ghost).
2269 * \param[in] nCell Number of local (father) cells.
2270 * \param[in] nParts Number of first-level partitions.
2271 * \param[in] nPartsInner Number of inner partitions per first-level part.
2272 * \param[in] localPartStartsIn Current partition starts (for contiguous sort).
2273 */
2274 CellPermutationResult ComputeCellPermutation(
2275 tLocalMatStruct &cell2cellFaceV,
2276 const tAdjPair &cell2cell,
2277 index nCell,
2278 int nParts,
2279 int nPartsInner)
2280 {
2281 CellPermutationResult result;
2282 result.cellOld2New.resize(nCell, -1);
2283 result.cellNew2Old.resize(nCell);
2284 for (index i = 0; i < nCell; i++)
2285 result.cellNew2Old[i] = i;
2286
2287 result.localPartitionStarts = ReorderSerialAdj_PartitionMetisC(
2288 cell2cellFaceV.begin(),
2289 cell2cellFaceV.end(),
2290 result.cellNew2Old.begin(),
2291 result.cellNew2Old.end(), nParts, 0, nPartsInner <= 1, result.bwOld, result.bwNew);
2292
2293 if (nPartsInner > 1)
2294 {
2295 //! Debug assertions: verify graph invariants around inner partitioning.
2296 auto dbgCheckSubGraphRanges = [&](const char *tag)
2297 {
2298 for (int p = 0; p < static_cast<int>(result.localPartitionStarts.size()) - 1; p++)
2299 {
2300 index pStart = result.localPartitionStarts[p];
2301 index pEnd = result.localPartitionStarts[p + 1];
2302 for (index iC = pStart; iC < pEnd; iC++)
2303 for (auto jC : cell2cellFaceV[iC])
2305 jC >= 0 && jC < nCell,
2306 "%s: partition %d [%lld,%lld): cell %lld has neighbor %lld outside [0,%lld)",
2307 tag, p, (long long)pStart, (long long)pEnd,
2308 (long long)iC, (long long)jC, (long long)nCell);
2309 }
2310 };
2311 auto dbgCheckBidir = [&](const char *tag)
2312 {
2313 for (index iC = 0; iC < nCell; iC++)
2314 for (auto jC : cell2cellFaceV[iC])
2315 {
2316 bool found = false;
2317 for (auto kC : cell2cellFaceV[jC])
2318 if (kC == iC)
2319 {
2320 found = true;
2321 break;
2322 }
2323 DNDS_assert_infof(found,
2324 "%s: edge %lld->%lld exists but reverse %lld->%lld missing",
2325 tag, (long long)iC, (long long)jC, (long long)jC, (long long)iC);
2326 }
2327 };
2328
2329 dbgCheckSubGraphRanges("before inner partitioning");
2330 dbgCheckBidir("before inner partitioning");
2331
2332 //! Each inner call partitions + RCM-reorders a first-level partition's sub-range,
2333 //! passing the full graph so cross-sub-graph references are updated in-place.
2334 for (int iPart = 0; iPart < static_cast<int>(result.localPartitionStarts.size()) - 1; iPart++)
2335 {
2336 index bwOldC{0}, bwNewC{0};
2337 index offset = result.localPartitionStarts[iPart];
2338 index offsetN = result.localPartitionStarts[iPart + 1];
2339 auto inner_parts_start = ReorderSerialAdj_PartitionMetisC(
2340 cell2cellFaceV.begin() + offset,
2341 cell2cellFaceV.begin() + offsetN,
2342 result.cellNew2Old.begin() + offset,
2343 result.cellNew2Old.begin() + offsetN, nPartsInner, offset, true, bwOldC, bwNewC,
2344 cell2cellFaceV.begin(), nCell);
2345 result.bwOld = std::max(result.bwOld, bwOldC);
2346 result.bwNew = std::max(result.bwNew, bwNewC);
2347
2348 dbgCheckSubGraphRanges(fmt::format("after inner part {}", iPart).c_str());
2349 }
2350
2351 dbgCheckBidir("after all inner partitioning");
2352 }
2353
2354 // Contiguous sorting: within each partition, put interior cells before
2355 // cells that touch ghost neighbors.
2356 {
2357 auto cellIsNotPrivate = [&](index iCell)
2358 {
2359 for (auto iCellOther : cell2cell[iCell])
2360 {
2361 if (iCellOther >= nCell)
2362 return 1;
2363 }
2364 return 0;
2365 };
2366 int nLocalParts = result.localPartitionStarts.size() ? static_cast<int>(result.localPartitionStarts.size()) - 1 : 1;
2367 auto localPartStart = [&](int iPart) -> index
2368 { return result.localPartitionStarts.size() ? result.localPartitionStarts.at(iPart) : 0; };
2369 auto localPartEnd = [&](int iPart) -> index
2370 { return result.localPartitionStarts.size() ? result.localPartitionStarts.at(iPart + 1) : nCell; };
2371
2372 std::vector<index> cellNew2Old_new;
2373 cellNew2Old_new.reserve(nCell);
2374 for (index i = 0; i < nCell; i++)
2375 result.cellOld2New[i] /*tmp storage*/ = cellIsNotPrivate(result.cellNew2Old[i]);
2376 for (int iPart = 0; iPart < nLocalParts; iPart++) // we have to keep the local partitions intact
2377 {
2378 for (index i = localPartStart(iPart); i < localPartEnd(iPart); i++)
2379 if (!result.cellOld2New[i])
2380 cellNew2Old_new.push_back(result.cellNew2Old[i]);
2381 for (index i = localPartStart(iPart); i < localPartEnd(iPart); i++)
2382 if (result.cellOld2New[i])
2383 cellNew2Old_new.push_back(result.cellNew2Old[i]);
2384 }
2385
2386 result.cellNew2Old = std::move(cellNew2Old_new);
2387 DNDS_assert(static_cast<index>(result.cellNew2Old.size()) == nCell);
2388 for (auto v : result.cellNew2Old)
2389 DNDS_assert(v < nCell && v >= 0);
2390 }
2391
2392 // Build inverse permutation
2393 std::unordered_set<index> set;
2394 set.reserve(result.cellNew2Old.size());
2395 for (index i = 0; i < nCell; i++)
2396 {
2397 DNDS_assert(set.count(result.cellNew2Old[i]) == 0);
2398 set.insert(result.cellNew2Old[i]);
2399 result.cellOld2New.at(result.cellNew2Old[i]) = i;
2400 }
2401 return result;
2402 }
2403 } // anonymous namespace
2404
2406 {
2408 DNDS_check_throw(int64_t(nParts) * int64_t(nPartsInner) < std::numeric_limits<int>::max());
2409 nParts = std::max(nParts, 1);
2410 nPartsInner = std::max(nPartsInner, 1);
2411
2412 // Section A: Compute cell permutation (Metis partition + contiguous sort)
2416 this->localPartitionStarts = std::move(perm.localPartitionStarts);
2417
2420 if (mpi.rank == mRank)
2421 log()
2422 << fmt::format("UnstructuredMesh === ReorderLocalCells, nPart0 [{}], got reordering, bw [{}] to [{}]", nParts, perm.bwOld, perm.bwNew) << std::endl;
2423
2424 // Section B: Build cellOld2NewArr for MPI communication of new cell indices
2426 cellOld2NewArr.InitPair("cellOld2NewArr", mpi);
2427 cellOld2NewArr.father->Resize(this->NumCell());
2428 for (index iCell = 0; iCell < this->NumCell(); iCell++)
2429 (*cellOld2NewArr.father)[iCell][0] = this->CellIndexLocal2Global_NoSon(perm.cellOld2New.at(iCell)); // this is right but bad syntax
2430 cellOld2NewArr.TransAttach();
2431
2432 std::set<index> cellsNonLocalFull;
2433
2434 auto record_iCellNonLocal = [&](index iCell)
2435 {
2436 if (iCell >= 0 && iCell < cell2node.father->pLGlobalMapping->globalSize())
2437 {
2438 if (std::get<1>(cell2node.father->pLGlobalMapping->search(iCell)) != mpi.rank)
2439 cellsNonLocalFull.insert(iCell);
2440 }
2441 else
2443 };
2444
2445 if (this->adjFacialState != Adj_Unknown)
2446 {
2448 this->AdjLocal2GlobalFacial();
2449 // see face2cell
2450 for (index iF = 0; iF < this->NumFace(); iF++)
2451 for (rowsize if2c = 0; if2c < 2; if2c++)
2453 }
2454 if (this->adjC2FState != Adj_Unknown)
2455 {
2457 this->AdjLocal2GlobalC2F();
2458 }
2459 if (this->adjN2CBState != Adj_Unknown)
2460 {
2462 this->AdjLocal2GlobalN2CB();
2463 // see node2cell
2464 for (index iN = 0; iN < this->NumNode(); iN++)
2465 for (rowsize in2c = 0; in2c < node2cell[iN].size(); in2c++)
2467 }
2468 this->AdjLocal2GlobalPrimary();
2469 // see cell2cell
2470 for (index iC = 0; iC < this->NumCell(); iC++)
2471 for (rowsize ic2c = 0; ic2c < cell2cell[iC].size(); ic2c++)
2473 // see bnd2cell
2474 for (index iB = 0; iB < this->NumBnd(); iB++)
2475 for (rowsize ib2c = 0; ib2c < 2; ib2c++)
2477
2478 for (auto iCellGhost : cell2node.trans.pLGhostMapping->ghostIndex)
2479 cellsNonLocalFull.insert(iCellGhost); // ! should have no effect normally
2480
2481 cellOld2NewArr.trans.createFatherGlobalMapping();
2482 cellOld2NewArr.trans.createGhostMapping(std::vector<index>(cellsNonLocalFull.begin(), cellsNonLocalFull.end()));
2483 cellOld2NewArr.trans.createMPITypes();
2484 cellOld2NewArr.trans.pullOnce();
2485
2486 // Section C: Replace cell indices in RHS of xxx2cell arrays
2488 {
2489 if (iCellOld == UnInitIndex)
2490 return UnInitIndex;
2491 auto [ret, rank, val] = cellOld2NewArr.trans.pLGhostMapping->search_indexAppend(iCellOld);
2493 return cellOld2NewArr(val, 0);
2494 };
2495
2496 if (this->adjFacialState != Adj_Unknown)
2497 for (index iFace = 0; iFace < this->NumFace(); iFace++)
2498 for (index &iCell : face2cell[iFace])
2500 if (this->adjN2CBState != Adj_Unknown)
2501 for (index iNode = 0; iNode < this->NumNode(); iNode++)
2502 for (index &iCell : node2cell[iNode])
2504 for (index iCell = 0; iCell < this->NumCell(); iCell++)
2507 for (index iBnd = 0; iBnd < this->NumBnd(); iBnd++)
2508 for (index &iCell : bnd2cell[iBnd])
2510
2511 // Section D: Pull ghost data for xxx2cell
2512 if (this->adjFacialState != Adj_Unknown)
2513 face2cell.trans.pullOnce();
2514 if (this->adjN2CBState != Adj_Unknown)
2515 node2cell.trans.pullOnce();
2516 cell2cell.trans.pullOnce(); // should be unnecessary
2517 bnd2cell.trans.pullOnce();
2518
2519 // Section E: Permute LHS of cell2xxx arrays
2520 auto cellOld2NewLocal = [&](index iCell) -> index
2521 { return this->CellIndexGlobal2Local_NoSon(cellOld2NewArr(iCell, 0)); };
2522
2526 if (this->adjC2FState != Adj_Unknown)
2527 PermuteRows(cell2face, this->NumCell(), cellOld2NewLocal);
2528 if (this->isPeriodic)
2531
2532 // Section F: Rebuild ghost mappings with new cell indices
2533 { // cell2node
2534 std::vector<index> ghostCellGlobalsNew = cell2node.trans.pLGhostMapping->ghostIndex;
2537 cell2node.trans.createGhostMapping(ghostCellGlobalsNew);
2538 cell2node.trans.createMPITypes();
2539 cell2node.trans.pullOnce();
2540 }
2541 { // cell2cell
2542 cell2cell.trans.BorrowGGIndexing(cell2node.trans);
2543 cell2cell.trans.createMPITypes();
2544 cell2cell.trans.pullOnce();
2545 }
2546 { // cell2cellOrig
2547 cell2cellOrig.trans.BorrowGGIndexing(cell2node.trans);
2548 cell2cellOrig.trans.createMPITypes();
2549 cell2cellOrig.trans.pullOnce();
2550 }
2551 if (this->adjC2FState != Adj_Unknown)
2552 { // cell2face
2553 cell2face.trans.BorrowGGIndexing(cell2node.trans);
2554 cell2face.trans.createMPITypes();
2555 cell2face.trans.pullOnce();
2556 }
2557 if (this->isPeriodic)
2558 { // cell2nodePbi
2559 cell2nodePbi.trans.BorrowGGIndexing(cell2node.trans);
2560 cell2nodePbi.trans.createMPITypes();
2561 cell2nodePbi.trans.pullOnce();
2562 }
2563 { // cellElemInfo
2564 cellElemInfo.trans.BorrowGGIndexing(cell2node.trans);
2565 cellElemInfo.trans.createMPITypes();
2566 cellElemInfo.trans.pullOnce();
2567 }
2568
2569 // Section G: Restore all adjacencies to local indices
2570 if (this->adjFacialState != Adj_Unknown)
2571 {
2572 this->AdjGlobal2LocalFacial();
2573 }
2574 if (this->adjC2FState != Adj_Unknown)
2575 {
2576 this->AdjGlobal2LocalC2F();
2577 }
2578 if (this->adjN2CBState != Adj_Unknown)
2579 {
2580 this->AdjGlobal2LocalN2CB();
2581 }
2582 this->AdjGlobal2LocalPrimary();
2583 if (mpi.rank == mRank)
2584 log() << fmt::format("UnstructuredMesh === ReorderLocalCells finished") << std::endl;
2585 }
2586}
Reserved for template implementations of #DeviceStorage primitives.
Pre-compiled-header style shim that includes the heavy Eigen headers under DNDSR's warning suppressio...
#define DNDS_assert_info(expr, info)
Debug-only assertion with an extra std::string info message.
Definition Errors.hpp:113
#define DNDS_assert(expr)
Debug-only assertion (compiled out when DNDS_NDEBUG is defined). Prints the expression + file/line + ...
Definition Errors.hpp:108
#define DNDS_assert_infof(expr, info,...)
Debug-only assertion with a printf-style format message.
Definition Errors.hpp:118
#define DNDS_check_throw(expr)
Runtime check active in both debug and release builds. Throws std::runtime_error if expr evaluates to...
Definition Errors.hpp:89
std::vector< index > cellNew2Old
Definition Mesh.cpp:2251
std::vector< std::vector< DNDS::index > > face2nodeV
Definition Mesh.cpp:1350
std::vector< index > cellOld2New
Definition Mesh.cpp:2250
DNDS::index nFaces
Definition Mesh.cpp:1354
std::vector< std::pair< DNDS::index, DNDS::index > > face2cellV
Definition Mesh.cpp:1352
std::vector< index > iFaceAllToCollected
mapping: old face idx -> collected idx (or UnInitIndex/-1)
Definition Mesh.cpp:1491
std::vector< std::vector< index > > faceSendLocals
per-rank lists of local face indices to send as ghost
Definition Mesh.cpp:1492
index bwNew
Definition Mesh.cpp:2254
std::vector< index > localPartitionStarts
Definition Mesh.cpp:2252
index nFacesNew
Definition Mesh.cpp:1493
std::vector< std::vector< NodePeriodicBits > > face2nodePbiV
Definition Mesh.cpp:1351
std::vector< ElemInfo > faceElemInfoV
Definition Mesh.cpp:1353
index bwOld
Definition Mesh.cpp:2253
Internal helpers for mesh partition redistribution.
Ghost-communication engine for a father / son ParArray pair.
void createMPITypes()
Collective: build the MPI derived datatypes (or in-situ buffers) that describe the ghost send/recv la...
void pullOnce()
Convenience: init + start + wait + clear a single pull.
void setFatherSon(const t_pArray &n_father, const t_pArray &n_son)
Attach father and son arrays. First setup step.
void createGhostMapping(TPullSet &&pullingIndexGlobal)
create ghost by pulling data
void createFatherGlobalMapping()
Collective: build the global offsets table on the father array.
Table mapping rank-local row indices to the global index space.
bool search(index globalQuery, MPI_int &rank, index &val) const
Convert a global index to (rank, local). Returns false if out of range.
Mapping between a rank's main data and its ghost copies.
void PushInfo2Serial2Global(std::vector< DNDS::index > &serial2Global, DNDS::index localSize, const std::vector< DNDS::index > &pushIndex, const std::vector< DNDS::index > &pushIndexStart, const DNDS::MPIInfo &mpi)
Reserved skeleton for parallel topology interpolation.
Definition Mesh.cpp:71
ArrayPair< ArrayNodePeriodicBits< DNDS::NonUniformSize > > tPbiPair
@ SerialOutput
Definition Mesh.hpp:822
void Partition2LocalIdx(const std::vector< TPartitionIdx > &partition, std::vector< DNDS::index > &localPush, std::vector< DNDS::index > &localPushStart, const DNDS::MPIInfo &mpi)
void Partition2Serial2Global(const std::vector< TPartitionIdx > &partition, std::vector< DNDS::index > &serial2Global, const DNDS::MPIInfo &mpi, DNDS::MPI_int nPart)
decltype(tAdj2Pair::father) tAdj2
DNDS_DEVICE_CALLABLE bool FaceIDIsInternal(t_index id)
std::function< index(index)> tIndexMapFunc
Definition Mesh.cpp:343
decltype(tAdjPair::father) tAdj
DNDS::ArrayAdjacencyPair< 2 > tAdj2Pair
Eigen::Vector3d tPoint
Definition Geometric.hpp:9
DNDS::ArrayPair< DNDS::ParArray< ElemInfo > > tElemInfoArrayPair
DNDS::ssp< DNDS::ParArray< ElemInfo > > tElemInfoArray
DNDS::ArrayAdjacencyPair< 1 > tAdj1Pair
DNDS_DEVICE_CALLABLE bool FaceIDIsPeriodic(t_index id)
tGPoint RotX(real theta)
DNDS_DEVICE_CALLABLE bool FaceIDIsPeriodicDonor(t_index id)
DNDS_DEVICE_CALLABLE bool FaceIDIsPeriodicMain(t_index id)
tGPoint RotZ(real theta)
void TransferDataSerial2Global(TArr &arraySerial, TArr &arrayDist, const std::vector< DNDS::index > &pushIndex, const std::vector< DNDS::index > &pushIndexStart, const DNDS::MPIInfo &mpi)
std::vector< index > ReorderSerialAdj_PartitionMetisC(tLocalMatStruct::iterator mat_begin, tLocalMatStruct::iterator mat_end, std::vector< index >::iterator i_new2old_begin, std::vector< index >::iterator i_new2old_end, int nParts, index ind_offset, bool do_rcm, index &bwOldM, index &bwNewM, tLocalMatStruct::iterator full_mat_begin, index full_n_elem)
Partition a sub-graph into nParts and optionally apply RCM reordering within each part.
void ConvertAdjSerial2Global(TAdj &arraySerialAdj, const std::vector< DNDS::index > &partitionJSerial2Global, const DNDS::MPIInfo &mpi)
DNDS::ArrayAdjacencyPair< DNDS::NonUniformSize > tAdjPair
DNDS_DEVICE_CALLABLE bool FaceIDIsPeriodic3(t_index id)
DNDS_DEVICE_CALLABLE bool FaceIDIsPeriodic2(t_index id)
DNDS_DEVICE_CALLABLE bool FaceIDIsExternalBC(t_index id)
DNDS_DEVICE_CALLABLE bool FaceIDIsPeriodic1(t_index id)
std::vector< std::vector< index > > tLocalMatStruct
Definition Geometric.hpp:66
tGPoint RotY(real theta)
MPI_int Scan(const void *sendbuf, void *recvbuf, MPI_int count, MPI_Datatype datatype, MPI_Op op, MPI_Comm comm)
Wrapper over MPI_Scan (inclusive prefix reduction).
Definition MPI.cpp:220
MPI_int Allreduce(const void *sendbuf, void *recvbuf, MPI_int count, MPI_Datatype datatype, MPI_Op op, MPI_Comm comm)
Wrapper over MPI_Allreduce.
Definition MPI.cpp:203
MPI_int Barrier(MPI_Comm comm)
Wrapper over MPI_Barrier.
Definition MPI.cpp:248
void AllreduceOneIndex(index &v, MPI_Op op, const MPIInfo &mpi)
Single-scalar Allreduce helper for indices (in-place, count = 1).
Definition MPI.hpp:679
ssp< SerializerBase > SerializerBaseSSP
the host side operators are provided as implemented
const MPI_Datatype DNDS_MPI_INDEX
MPI datatype matching index (= MPI_INT64_T).
Definition MPI.hpp:90
DNDS_CONSTANT const index UnInitIndex
Sentinel "not initialised" index value (= INT64_MIN).
Definition Defines.hpp:176
void MPISerialDo(const MPIInfo &mpi, F f)
Execute f on each rank serially, in rank order.
Definition MPI.hpp:698
int32_t rowsize
Row-width / per-row element-count type (signed 32-bit).
Definition Defines.hpp:109
index size_to_index(size_t v)
Range-checked conversion from size_t to DNDS::index.
Definition Defines.hpp:426
int64_t index
Global row / DOF index type (signed 64-bit; handles multi-billion-cell meshes).
Definition Defines.hpp:107
ssp< T > make_ssp(Args &&...args)
Type-safe replacement for DNDS_MAKE_SSP. Creates ssp<T> with forwarded args.
Definition Defines.hpp:255
std::ostream & log()
Return the current DNDSR log stream (either std::cout or the installed file).
Definition Defines.cpp:49
int MPI_int
MPI counterpart type for MPI_int (= C int). Used for counts and ranks in MPI calls.
Definition MPI.hpp:43
set(LIBNAME cfv) set(LINKS) set(LINKS_SHARED geom_shared dnds_shared $
Definition CMakeLists.txt:5
DNDS_DEVICE_CALLABLE index Size() const
Combined father + son row count.
Definition ArrayPair.hpp:33
t_arrayDeviceView father
Convenience bundle of a father, son, and attached ArrayTransformer.
void TransAttach()
Bind the transformer to the current father / son pointers.
ssp< TArray > father
Owned-side array (must be resized before ghost setup).
void WriteSerialize(Serializer::SerializerBaseSSP serializerP, const std::string &name, bool includePIG=true, bool includeSon=true)
Writes the ArrayPair (father, optional son, optional ghost mapping).
index Size() const
Combined row count (father->Size() + son->Size()).
void InitPair(const std::string &name, Args &&...args)
Allocate both father and son arrays, forwarding all args to TArray constructor.
ssp< TArray > son
Ghost-side array (sized automatically by createMPITypes / BorrowAndPull).
void BorrowAndPull(TPrimaryPair &primary)
Attach, borrow ghost indexing from a primary pair, create MPI types, and pull once.
auto RowSize() const
Uniform row width (delegates to father).
void ResizeRow(index i, rowsize rs)
Resize a single row in the combined address space.
void ReadSerialize(Serializer::SerializerBaseSSP serializerP, const std::string &name, bool includePIG=true, bool includeSon=true)
Reads an ArrayPair written by WriteSerialize (same partition count).
TTrans trans
Ghost-communication engine bound to father and son.
void ObtainSymmetricSymbolicFactorization(const TAdj &cell2cellFaceV, const std::vector< index > localPartStarts_in, int iluCode)
get symmetric symbolic matrix factorization over cell2cellFaceV
Definition Direct.hpp:81
static MPI_Datatype CommType()
static MPI_Datatype CommType()
DNDS_HOST void WriteSerializer(Serializer::SerializerBaseSSP serializerP, const std::string &name)
std::array< tPointPortable, 4 > rotationCenter
std::array< tPointPortable, 4 > translation
std::array< tGPointPortable, 4 > rotation
DNDS_HOST void ReadSerializer(Serializer::SerializerBaseSSP serializerP, const std::string &name)
tAdjPair::t_deviceView< B > cell2node
tCoordPair::t_deviceView< B > coords
reader
DNDS::ArrayTransformerType< tCoord::element_type >::Type coordSerialOutTrans
Definition Mesh.hpp:907
DNDS::ArrayTransformerType< tElemInfoArray::element_type >::Type cellElemInfoSerialOutTrans
Definition Mesh.hpp:911
void BuildSerialOut()
should be called to build data for serial out
Definition Mesh.cpp:203
DNDS::ArrayTransformerType< tAdj::element_type >::Type cell2nodeSerialOutTrans
Definition Mesh.hpp:908
DNDS::ssp< UnstructuredMesh > mesh
Definition Mesh.hpp:856
DNDS::ArrayTransformerType< tPbi::element_type >::Type cell2nodePbiSerialOutTrans
Definition Mesh.hpp:909
std::vector< DNDS::MPI_int > cellPartition
Definition Mesh.hpp:914
std::vector< DNDS::MPI_int > bndPartition
Definition Mesh.hpp:916
std::vector< DNDS::MPI_int > nodePartition
Definition Mesh.hpp:915
MeshAdjState adjC2FState
Definition Mesh.hpp:45
UnstructuredMeshDeviceView< B > t_deviceView
Definition Mesh.hpp:777
index NodeIndexGlobal2Local_NoSon(index i)
Definition Mesh.hpp:295
tPbiPair cell2nodePbi
periodic only, after reader
Definition Mesh.hpp:65
std::unordered_map< index, index > face2bndM
Definition Mesh.hpp:102
index NumNodeProc() const
Definition Mesh.hpp:466
index CellIndexLocal2Global_NoSon(index i)
Definition Mesh.hpp:302
tPbiPair face2nodePbi
periodic only, after interpolated
Definition Mesh.hpp:104
std::vector< index > node2bndNode
Definition Mesh.hpp:128
void BuildGhostPrimary()
building ghost (son) from primary (currently only cell2cell)
Definition Mesh.cpp:802
index NumNodeGhost() const
Definition Mesh.hpp:461
MeshAdjState adjC2CFaceState
Definition Mesh.hpp:49
std::vector< index > node2parentNode
parent built
Definition Mesh.hpp:127
index NumCellProc() const
Definition Mesh.hpp:467
index NodeIndexLocal2Global_NoSon(index i)
Definition Mesh.hpp:294
tLocalMatStruct cell2cellFaceVLocalParts
for cell local factorization
Definition Mesh.hpp:169
index BndIndexLocal2Global_NoSon(index i)
Definition Mesh.hpp:310
tCoordPair coords
reader
Definition Mesh.hpp:54
index CellIndexGlobal2Local(DNDS::index i)
Definition Mesh.hpp:300
tLocalMatStruct GetCell2CellFaceVLocal(bool onLocalPartition=false)
void RecoverNode2CellAndNode2Bnd()
only requires father part of cell2node, bnd2node and coords generates node2cell and node2bnd (father ...
Definition Mesh.cpp:489
MeshAdjState adjN2CBState
Definition Mesh.hpp:47
void ReorderLocalCells(int nParts=1, int nPartsInner=1)
Definition Mesh.cpp:2405
void ReadSerialize(Serializer::SerializerBaseSSP serializerP, const std::string &name)
Definition Mesh.cpp:1970
tAdjPair cell2cellFace
constructed on demand
Definition Mesh.hpp:124
index CellIndexGlobal2Local_NoSon(index i)
Definition Mesh.hpp:303
index BndIndexLocal2Global(DNDS::index i)
Definition Mesh.hpp:309
tAdjPair node2cell
inverse relations
Definition Mesh.hpp:83
std::vector< index > bnd2faceV
Definition Mesh.hpp:101
index CellIndexLocal2Global(DNDS::index i)
Definition Mesh.hpp:301
MeshAdjState adjPrimaryState
Definition Mesh.hpp:41
void ObtainSymmetricSymbolicFactorization(Direct::SerialSymLUStructure &symLU, Direct::DirectPrecControl control) const
Definition Mesh.cpp:2231
index CellFaceOther(index iCell, index iFace) const
Definition Mesh.hpp:624
tElemInfoArrayPair faceElemInfo
Definition Mesh.hpp:97
void SetPeriodicGeometry(const tPoint &translation1, const tPoint &rotationCenter1, const tPoint &eulerAngles1, const tPoint &translation2, const tPoint &rotationCenter2, const tPoint &eulerAngles2, const tPoint &translation3, const tPoint &rotationCenter3, const tPoint &eulerAngles3)
Definition Mesh.cpp:457
std::vector< index > localPartitionStarts
Definition Mesh.hpp:171
MeshAdjState adjFacialState
Definition Mesh.hpp:43
index BndIndexGlobal2Local(DNDS::index i)
Definition Mesh.hpp:308
void RecoverCell2CellAndBnd2Cell()
needs to use RecoverNode2CellAndNode2Bnd before doing this. Requires node2cell.father and builds a ve...
Definition Mesh.cpp:542
void WriteSerialize(Serializer::SerializerBaseSSP serializerP, const std::string &name)
Definition Mesh.cpp:1933
tElemInfoArrayPair cellElemInfo
Definition Mesh.hpp:59
static void ConvertAdjEntries(TAdj &adj, index nRows, TFn &&fn)
Apply a conversion function to every entry of an adjacency array's first nRows rows.
Definition Mesh.hpp:332
index NumCellGhost() const
Definition Mesh.hpp:462
static void PermuteRows(TPair &pair, index nRows, TFn &&old2new)
Permute the father rows of an ArrayPair according to a mapping function.
Definition Mesh.hpp:357
void ConstructBndMesh(UnstructuredMesh &bMesh)
Definition Mesh.cpp:2061
tElemInfoArrayPair bndElemInfo
Definition Mesh.hpp:60
tAdjPair cell2face
interpolated
Definition Mesh.hpp:94
Lightweight bundle of an MPI communicator and the calling rank's coordinates.
Definition MPI.hpp:215
int size
Number of ranks in comm (-1 until initialised).
Definition MPI.hpp:221
int rank
This rank's 0-based index within comm (-1 until initialised).
Definition MPI.hpp:219
MPI_Comm comm
The underlying MPI communicator handle.
Definition MPI.hpp:217
Tag type for naming objects created via make_ssp.
Definition Defines.hpp:249
Eigen::Matrix< real, 5, 1 > v
tVec r(NCells)
tVec b(NCells)
if(g_mpi.rank==0) std auto[rhsDensity, rhsEnergy, incL2]
for(int i=0;i< 10;i++) theta(i
auto result