decompositionMethod.C
Go to the documentation of this file.
1 /*---------------------------------------------------------------------------*\
2  ========= |
3  \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
4  \\ / O peration | Website: https://openfoam.org
5  \\ / A nd | Copyright (C) 2011-2024 OpenFOAM Foundation
6  \\/ M anipulation |
7 -------------------------------------------------------------------------------
8 License
9  This file is part of OpenFOAM.
10 
11  OpenFOAM is free software: you can redistribute it and/or modify it
12  under the terms of the GNU General Public License as published by
13  the Free Software Foundation, either version 3 of the License, or
14  (at your option) any later version.
15 
16  OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
17  ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
18  FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19  for more details.
20 
21  You should have received a copy of the GNU General Public License
22  along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
23 
24 \*---------------------------------------------------------------------------*/
25 
26 #include "decompositionMethod.H"
27 #include "Time.H"
28 #include "globalIndex.H"
29 #include "syncTools.H"
30 #include "Tuple2.H"
31 #include "faceSet.H"
32 #include "regionSplit.H"
33 #include "localPointRegion.H"
34 #include "minData.H"
35 #include "FaceCellWave.H"
36 
41 
42 // * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
43 
44 namespace Foam
45 {
49 }
50 
51 
52 // * * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * //
53 
// Return the number of weights held per point: pointWeights is a flat list
// of nWeights values for each point, so the per-point count is the size
// ratio. The result is the maximum over all processors so that processors
// holding no points (which report 0) do not lower the global count.
// NOTE(review): the extraction dropped the signature line (label
// decompositionMethod::nWeights) and the FatalErrorInFunction line that
// precedes the error stream below — confirm against the repository source.
55 (
56  const pointField& points,
57  const scalarField& pointWeights
58 ) const
59 {
60  const label localnWeights =
61  points.size() ? pointWeights.size()/points.size() : 0;
62 
// Reduce with max so empty processors do not mask the true count
63  const label nWeights = returnReduce(localnWeights, maxOp<label>());
64 
// Any processor that does hold weights must agree with the global count
65  if (localnWeights && localnWeights != nWeights)
66  {
68  << "Number of weights on this processor " << localnWeights
69  << " does not equal the maximum number of weights " << nWeights
70  << exit(FatalError);
71  }
72 
73  return nWeights;
74 }
75 
76 
// Validate the weights against the points for methods that only support a
// single weight per point: delegates the counting/consistency check to
// nWeights() and then fails hard if more than one weight per point was
// supplied. Returns the (0 or 1) weight count.
// NOTE(review): signature line and FatalErrorInFunction line dropped by the
// extraction — confirm against the repository source.
78 (
79  const pointField& points,
80  const scalarField& pointWeights
81 ) const
82 {
83  const label nWeights = this->nWeights(points, pointWeights);
84 
85  if (nWeights > 1)
86  {
88  << "decompositionMethod " << type()
89  << " does not support multiple constraints"
90  << exit(FatalError);
91  }
92 
93  return nWeights;
94 }
95 
96 
97 // * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
98 
// Construct from the decomposition dictionary: reads the mandatory
// "numberOfSubdomains" entry and, if a "constraints" sub-dictionary is
// present, instantiates one decompositionConstraint per entry (selected by
// its "type" keyword) into the constraints_ list.
// NOTE(review): the extraction dropped the constructor name line and the
// decompositionConstraint::New call line inside constraints_.append(...).
100 (
101  const dictionary& decompositionDict
102 )
103 :
104  nProcessors_(decompositionDict.lookup<label>("numberOfSubdomains"))
105 {
106  // Read any constraints
// NOTE(review): despite the trailing underscore this is a LOCAL variable,
// not a data member — the member-style name is misleading but harmless.
107  wordList constraintTypes_;
108 
109  if (decompositionDict.found("constraints"))
110  {
111  const dictionary& constraintsList = decompositionDict.subDict
112  (
113  "constraints"
114  );
115  forAllConstIter(dictionary, constraintsList, iter)
116  {
117  const dictionary& dict = iter().dict();
118 
// Remember the type so it can be handed to the constraint selector below
119  constraintTypes_.append(dict.lookup("type"));
120 
121  constraints_.append
122  (
124  (
125  dict,
126  constraintTypes_.last()
127  )
128  );
129  }
130  }
131 }
132 
133 
134 // * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
135 
// Selector: construct the decomposer named by the "decomposer" (or legacy
// "method") entry. Opens any libraries listed under "libs" so dynamically
// loaded methods can register themselves, looks the method up in the
// run-time selection table, and constructs it with the top-level dict plus
// its optional "<method>Coeffs" sub-dictionary.
// NOTE(review): the extraction dropped the function name line, the
// FatalErrorInFunction line, and the autoPtr<decompositionMethod> methodPtr
// declaration line before the constructor call.
138 (
139  const dictionary& decompositionDict
140 )
141 {
142  const word methodType
143  (
144  decompositionDict.lookupBackwardsCompatible<word>
145  (
146  {"decomposer", "method"}
147  )
148  );
149 
150  Info<< "Selecting decomposer " << methodType << endl;
151 
// Load any user libraries so their table entries exist before the lookup
152  libs.open
153  (
154  decompositionDict,
155  "libs",
156  decomposerConstructorTablePtr_
157  );
158 
159  decomposerConstructorTable::iterator cstrIter =
160  decomposerConstructorTablePtr_->find(methodType);
161 
162  if (cstrIter == decomposerConstructorTablePtr_->end())
163  {
165  << "Unknown decomposer "
166  << methodType << nl << nl
167  << "Valid decomposers are : " << endl
168  << decomposerConstructorTablePtr_->sortedToc()
169  << exit(FatalError);
170  }
171 
172  Info<< incrIndent;
173 
175  (
176  cstrIter()
177  (
178  decompositionDict,
179  decompositionDict.subOrEmptyDict(methodType + "Coeffs")
180  )
181  );
182 
183  Info<< decrIndent;
184 
185  return methodPtr;
186 }
187 
188 
// Selector: construct the distributor named by the "distributor" (or legacy
// "method") entry. Mirrors NewDecomposer but uses the distributor
// run-time selection table.
// NOTE(review): the extraction dropped the function name line, the
// FatalErrorInFunction line, and the autoPtr<decompositionMethod> methodPtr
// declaration line before the constructor call.
191 (
192  const dictionary& distributionDict
193 )
194 {
195  const word methodType
196  (
197  distributionDict.lookupBackwardsCompatible<word>
198  (
199  {"distributor", "method"}
200  )
201  );
202 
203  Info<< "Selecting distributor " << methodType << endl;
204 
// Load any user libraries so their table entries exist before the lookup
205  libs.open
206  (
207  distributionDict,
208  "libs",
209  distributorConstructorTablePtr_
210  );
211 
212  distributorConstructorTable::iterator cstrIter =
213  distributorConstructorTablePtr_->find(methodType);
214 
215  if (cstrIter == distributorConstructorTablePtr_->end())
216  {
218  << "Unknown distributor "
219  << methodType << nl << nl
220  << "Valid distributors are : " << endl
221  << distributorConstructorTablePtr_->sortedToc()
222  << exit(FatalError);
223  }
224 
225  Info<< incrIndent;
226 
228  (
229  cstrIter()
230  (
231  distributionDict,
232  distributionDict.subOrEmptyDict(methodType + "Coeffs")
233  )
234  );
235 
236  Info<< decrIndent;
237 
238  return methodPtr;
239 }
240 
241 
// Read and return the "decomposeParDict" IOdictionary from the case's
// system directory.
// NOTE(review): the extraction dropped the function signature line and two
// IOobject argument lines (between the registry argument and "false") —
// presumably the read/write options; confirm against the repository source.
243 {
244  return IOdictionary
245  (
246  IOobject
247  (
248  "decomposeParDict",
249  time.system(),
250  time,
253  false
254  )
255  );
256 }
257 
258 
// Convenience overload: decompose the mesh points with uniform (unit)
// weights by forwarding to the weighted overload.
// NOTE(review): function name line dropped by the extraction.
260 (
261  const polyMesh& mesh,
262  const pointField& points
263 )
264 {
// Unit weight per point
265  scalarField weights(points.size(), 1.0);
266 
267  return decompose(mesh, points, weights);
268 }
269 
270 
// Decompose an agglomerated (coarse) mesh: build the coarse-cell
// connectivity from the fine mesh and the fine->coarse map, decompose the
// coarse graph, then map the coarse distribution back onto the fine cells.
// NOTE(review): function name line dropped by the extraction.
272 (
273  const polyMesh& mesh,
274  const labelList& fineToCoarse,
275  const pointField& coarsePoints,
276  const scalarField& coarseWeights
277 )
278 {
279  CompactListList<label> coarseCellCells;
280  calcCellCells
281  (
282  mesh,
283  fineToCoarse,
284  coarsePoints.size(),
285  true, // use global cell labels
286  coarseCellCells
287  );
288 
289  // Decompose based on agglomerated points
290  labelList coarseDistribution
291  (
292  decompose
293  (
294  coarseCellCells.list(),
295  coarsePoints,
296  coarseWeights
297  )
298  );
299 
300  // Rework back into decomposition for original mesh_
301  labelList fineDistribution(fineToCoarse.size());
302 
// Each fine cell inherits the processor of its coarse (agglomerated) cell
303  forAll(fineDistribution, i)
304  {
305  fineDistribution[i] = coarseDistribution[fineToCoarse[i]];
306  }
307 
308  return fineDistribution;
309 }
310 
311 
// Convenience overload: decompose an agglomerated mesh with uniform (unit)
// coarse-cell weights by forwarding to the weighted overload.
// NOTE(review): function name line dropped by the extraction.
313 (
314  const polyMesh& mesh,
315  const labelList& fineToCoarse,
316  const pointField& coarsePoints
317 )
318 {
// Unit weight per coarse cell
319  scalarField cellWeights(coarsePoints.size(), 1.0);
320 
321  return decompose
322  (
323  mesh,
324  fineToCoarse,
325  coarsePoints,
326  cellWeights
327  );
328 }
329 
330 
// Convenience overload: decompose an explicit connectivity graph with
// uniform (unit) weights by forwarding to the weighted overload.
// NOTE(review): function name line dropped by the extraction.
332 (
333  const labelListList& globalCellCells,
334  const pointField& cellCentres
335 )
336 {
// Unit weight per cell
337  scalarField cellWeights(cellCentres.size(), 1.0);
338 
339  return decompose(globalCellCells, cellCentres, cellWeights);
340 }
341 
342 
// Convert scalar weights into integer weights for graph partitioners
// (Scotch/METIS): scales by INT32_MAX/(2*sum) so the integer total stays
// in range, then drops any weight index whose global sum is zero, updating
// nWeights (in/out) to the number of surviving weights.
// NOTE(review): function name line dropped by the extraction.
344 (
345  const scalarField& weights,
346  label& nWeights,
347  const bool distributed
348 )
349 {
350  labelList intWeights;
351 
352  if (nWeights > 0)
353  {
354  // Calculate the scalar -> integer scaling factor
355  const scalar sumWeights
356  (
357  distributed
358  ? gSum(weights)
359  : sum(weights)
360  );
361 
362  // Hack for scotch which does not accept 64bit label range for weights
363  const scalar scale = INT32_MAX/(2*sumWeights);
364  // const scalar scale = labelMax/(2*sumWeights);
365 
366 
367  // Convert weights to integer
368  intWeights.setSize(weights.size());
369  forAll(intWeights, i)
370  {
// ceil so any non-zero scalar weight maps to a non-zero integer weight
371  intWeights[i] = ceil(scale*weights[i]);
372  }
373 
374  /*
375  // Alternatively calculate a separate scaling factor
376  // for each weight, it is not clear from the parMETIS or Scotch manuals
377  // which method should be used.
378  //
379  // Calculate the scalar -> integer scaling factors
380  scalarList sumWeights(nWeights, 0.0);
381 
382  forAll(weights, i)
383  {
384  sumWeights[i % nWeights] += weights[i];
385  }
386 
387  if (distributed)
388  {
389  reduce(sumWeights, ListOp<sumOp<scalar>>());
390  }
391 
392  scalarList scale(nWeights, 0.0);
393  forAll(scale, i)
394  {
395  if (sumWeights[i] > small)
396  {
397  scale[i] = labelMax/(2*sumWeights[i]);
398  }
399  }
400 
401  // Convert weights to integer
402  intWeights.setSize(weights.size());
403  forAll(intWeights, i)
404  {
405  intWeights[i] = ceil(scale[i % nWeights]*weights[i]);
406  }
407  */
408 
409  // Calculate the sum over all processors of each weight
// Weights are interleaved: entry i belongs to weight index i % nWeights
410  labelList sumIntWeights(nWeights, 0);
411  forAll(weights, i)
412  {
413  sumIntWeights[i % nWeights] += intWeights[i];
414  }
415 
416  if (distributed)
417  {
418  reduce(sumIntWeights, ListOp<sumOp<label>>());
419  }
420 
421  // Check that the sum of each weight is non-zero
422  boolList nonZeroWeights(nWeights, false);
423  label nNonZeroWeights = 0;
424  forAll(sumIntWeights, i)
425  {
426  if (sumIntWeights[i] != 0)
427  {
428  nonZeroWeights[i] = true;
429  nNonZeroWeights++;
430  }
431  }
432 
433  // If there are zero weights remove them from the weights list
434  if (nNonZeroWeights != nWeights)
435  {
// Compact in place, keeping only the surviving weight indices
436  label j = 0;
437  forAll(intWeights, i)
438  {
439  if (nonZeroWeights[i % nWeights])
440  {
441  intWeights[j++] = intWeights[i];
442  }
443  }
444 
445  // Resize the weights list
446  intWeights.setSize(nNonZeroWeights*(intWeights.size()/nWeights));
447 
448  // Reset the number of weight to the number of non-zero weights
449  nWeights = nNonZeroWeights;
450  }
451  }
452 
453  return intWeights;
454 }
455 
456 
// Build the (local or global, depending on 'parallel') coarse-cell
// connectivity graph from the mesh and the cell->coarse-cell map 'agglom':
// count faces per coarse cell, fill the CompactListList with global
// neighbour labels (internal faces plus coupled boundary faces), then
// remove duplicate connections as a post-processing pass.
// NOTE(review): the extraction dropped the function name line, the
// polyBoundaryMesh 'patches' declaration, two globalIndex constructor
// argument lines, and each 'forAll(patches, patchi)' loop header (before
// the three bare '{' patch loops below) — confirm against the repository.
458 (
459  const polyMesh& mesh,
460  const labelList& agglom,
461  const label nLocalCoarse,
462  const bool parallel,
463  CompactListList<label>& cellCells
464 )
465 {
466  const labelList& faceOwner = mesh.faceOwner();
467  const labelList& faceNeighbour = mesh.faceNeighbour();
469 
470 
471  // Create global cell numbers
472  // ~~~~~~~~~~~~~~~~~~~~~~~~~~
473 
474  globalIndex globalAgglom
475  (
476  nLocalCoarse,
479  parallel
480  );
481 
482 
483  // Get agglomerate owner on other side of coupled faces
484  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
485 
486  labelList globalNeighbour(mesh.nFaces() - mesh.nInternalFaces());
487 
489  {
490  const polyPatch& pp = patches[patchi];
491 
// Processor patches only participate when building the global graph
492  if (pp.coupled() && (parallel || !isA<processorPolyPatch>(pp)))
493  {
494  label facei = pp.start();
495  label bFacei = pp.start() - mesh.nInternalFaces();
496 
497  forAll(pp, i)
498  {
499  globalNeighbour[bFacei] = globalAgglom.toGlobal
500  (
501  agglom[faceOwner[facei]]
502  );
503 
504  bFacei++;
505  facei++;
506  }
507  }
508  }
509 
510  // Get the cell on the other side of coupled patches
511  syncTools::swapBoundaryFaceList(mesh, globalNeighbour);
512 
513 
514  // Count number of faces (internal + coupled)
515  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
516 
517  // Number of faces per coarse cell
518  labelList nFacesPerCell(nLocalCoarse, 0);
519 
520  for (label facei = 0; facei < mesh.nInternalFaces(); facei++)
521  {
522  label own = agglom[faceOwner[facei]];
523  label nei = agglom[faceNeighbour[facei]];
524 
525  nFacesPerCell[own]++;
526  nFacesPerCell[nei]++;
527  }
528 
530  {
531  const polyPatch& pp = patches[patchi];
532 
533  if (pp.coupled() && (parallel || !isA<processorPolyPatch>(pp)))
534  {
535  label facei = pp.start();
536  label bFacei = pp.start() - mesh.nInternalFaces();
537 
538  forAll(pp, i)
539  {
540  label own = agglom[faceOwner[facei]];
541 
542  label globalNei = globalNeighbour[bFacei];
// Only count the face if the far side lies in a different coarse cell
543  if
544  (
545  !globalAgglom.isLocal(globalNei)
546  || globalAgglom.toLocal(globalNei) != own
547  )
548  {
549  nFacesPerCell[own]++;
550  }
551 
552  facei++;
553  bFacei++;
554  }
555  }
556  }
557 
558 
559  // Fill in offset and data
560  // ~~~~~~~~~~~~~~~~~~~~~~~
561 
562  cellCells.setSize(nFacesPerCell);
563 
// Reused as a per-cell insertion cursor in the fill loops below
564  nFacesPerCell = 0;
565 
566  labelUList& m = cellCells.m();
567  const labelList& offsets = cellCells.offsets();
568 
569  // For internal faces is just offsetted owner and neighbour
570  for (label facei = 0; facei < mesh.nInternalFaces(); facei++)
571  {
572  label own = agglom[faceOwner[facei]];
573  label nei = agglom[faceNeighbour[facei]];
574 
575  m[offsets[own] + nFacesPerCell[own]++] = globalAgglom.toGlobal(nei);
576  m[offsets[nei] + nFacesPerCell[nei]++] = globalAgglom.toGlobal(own);
577  }
578 
579  // For boundary faces is offsetted coupled neighbour
581  {
582  const polyPatch& pp = patches[patchi];
583 
584  if (pp.coupled() && (parallel || !isA<processorPolyPatch>(pp)))
585  {
586  label facei = pp.start();
587  label bFacei = pp.start() - mesh.nInternalFaces();
588 
589  forAll(pp, i)
590  {
591  label own = agglom[faceOwner[facei]];
592 
593  label globalNei = globalNeighbour[bFacei];
594 
595  if
596  (
597  !globalAgglom.isLocal(globalNei)
598  || globalAgglom.toLocal(globalNei) != own
599  )
600  {
601  m[offsets[own] + nFacesPerCell[own]++] = globalNei;
602  }
603 
604  facei++;
605  bFacei++;
606  }
607  }
608  }
609 
610 
611  // Check for duplicates connections between cells
612  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
613  // Done as postprocessing step since we now have cellCells.
614  label newIndex = 0;
615  labelHashSet nbrCells;
616 
617 
618  if (cellCells.size() == 0)
619  {
620  return;
621  }
622 
623  label startIndex = cellCells.offsets()[0];
624 
// Compact each row in place, skipping repeated neighbours and self-links
625  forAll(cellCells, celli)
626  {
627  nbrCells.clear();
// Seed with the cell itself so self-connections are dropped too
628  nbrCells.insert(globalAgglom.toGlobal(celli));
629 
630  label endIndex = cellCells.offsets()[celli+1];
631 
632  for (label i = startIndex; i < endIndex; i++)
633  {
634  if (nbrCells.insert(cellCells.m()[i]))
635  {
636  cellCells.m()[newIndex++] = cellCells.m()[i];
637  }
638  }
639  startIndex = endIndex;
640  cellCells.offsets()[celli+1] = newIndex;
641  }
642 
643  cellCells.setSize(cellCells.size(), newIndex);
644 }
645 
646 
648 (
649  const polyMesh& mesh,
650  const labelList& agglom,
651  const label nLocalCoarse,
652  const bool parallel,
653  CompactListList<label>& cellCells,
654  CompactListList<scalar>& cellCellWeights
655 )
656 {
657  const labelList& faceOwner = mesh.faceOwner();
658  const labelList& faceNeighbour = mesh.faceNeighbour();
660 
661 
662  // Create global cell numbers
663  // ~~~~~~~~~~~~~~~~~~~~~~~~~~
664 
665  globalIndex globalAgglom
666  (
667  nLocalCoarse,
670  parallel
671  );
672 
673 
674  // Get agglomerate owner on other side of coupled faces
675  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
676 
677  labelList globalNeighbour(mesh.nFaces() - mesh.nInternalFaces());
678 
680  {
681  const polyPatch& pp = patches[patchi];
682 
683  if (pp.coupled() && (parallel || !isA<processorPolyPatch>(pp)))
684  {
685  label faceI = pp.start();
686  label bFaceI = pp.start() - mesh.nInternalFaces();
687 
688  forAll(pp, i)
689  {
690  globalNeighbour[bFaceI] = globalAgglom.toGlobal
691  (
692  agglom[faceOwner[faceI]]
693  );
694 
695  bFaceI++;
696  faceI++;
697  }
698  }
699  }
700 
701  // Get the cell on the other side of coupled patches
702  syncTools::swapBoundaryFaceList(mesh, globalNeighbour);
703 
704 
705  // Count number of faces (internal + coupled)
706  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
707 
708  // Number of faces per coarse cell
709  labelList nFacesPerCell(nLocalCoarse, 0);
710 
711  for (label faceI = 0; faceI < mesh.nInternalFaces(); faceI++)
712  {
713  label own = agglom[faceOwner[faceI]];
714  label nei = agglom[faceNeighbour[faceI]];
715 
716  nFacesPerCell[own]++;
717  nFacesPerCell[nei]++;
718  }
719 
721  {
722  const polyPatch& pp = patches[patchi];
723 
724  if (pp.coupled() && (parallel || !isA<processorPolyPatch>(pp)))
725  {
726  label faceI = pp.start();
727  label bFaceI = pp.start()-mesh.nInternalFaces();
728 
729  forAll(pp, i)
730  {
731  label own = agglom[faceOwner[faceI]];
732 
733  label globalNei = globalNeighbour[bFaceI];
734  if
735  (
736  !globalAgglom.isLocal(globalNei)
737  || globalAgglom.toLocal(globalNei) != own
738  )
739  {
740  nFacesPerCell[own]++;
741  }
742 
743  faceI++;
744  bFaceI++;
745  }
746  }
747  }
748 
749 
750  // Fill in offset and data
751  // ~~~~~~~~~~~~~~~~~~~~~~~
752 
753  cellCells.setSize(nFacesPerCell);
754  cellCellWeights.setSize(nFacesPerCell);
755 
756  nFacesPerCell = 0;
757 
758  labelUList& m = cellCells.m();
759  scalarUList& w = cellCellWeights.m();
760  const labelList& offsets = cellCells.offsets();
761 
762  // For internal faces is just offsetted owner and neighbour
763  for (label faceI = 0; faceI < mesh.nInternalFaces(); faceI++)
764  {
765  label own = agglom[faceOwner[faceI]];
766  label nei = agglom[faceNeighbour[faceI]];
767 
768  label ownIndex = offsets[own] + nFacesPerCell[own]++;
769  label neiIndex = offsets[nei] + nFacesPerCell[nei]++;
770 
771  m[ownIndex] = globalAgglom.toGlobal(nei);
772  w[ownIndex] = mesh.magFaceAreas()[faceI];
773  m[neiIndex] = globalAgglom.toGlobal(own);
774  w[ownIndex] = mesh.magFaceAreas()[faceI];
775  }
776 
777  // For boundary faces is offsetted coupled neighbour
779  {
780  const polyPatch& pp = patches[patchi];
781 
782  if (pp.coupled() && (parallel || !isA<processorPolyPatch>(pp)))
783  {
784  label faceI = pp.start();
785  label bFaceI = pp.start()-mesh.nInternalFaces();
786 
787  forAll(pp, i)
788  {
789  label own = agglom[faceOwner[faceI]];
790 
791  label globalNei = globalNeighbour[bFaceI];
792 
793  if
794  (
795  !globalAgglom.isLocal(globalNei)
796  || globalAgglom.toLocal(globalNei) != own
797  )
798  {
799  label ownIndex = offsets[own] + nFacesPerCell[own]++;
800  m[ownIndex] = globalNei;
801  w[ownIndex] = mesh.magFaceAreas()[faceI];
802  }
803 
804  faceI++;
805  bFaceI++;
806  }
807  }
808  }
809 
810 
811  // Check for duplicates connections between cells
812  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
813  // Done as postprocessing step since we now have cellCells.
814  label newIndex = 0;
815  labelHashSet nbrCells;
816 
817 
818  if (cellCells.size() == 0)
819  {
820  return;
821  }
822 
823  label startIndex = cellCells.offsets()[0];
824 
825  forAll(cellCells, cellI)
826  {
827  nbrCells.clear();
828  nbrCells.insert(globalAgglom.toGlobal(cellI));
829 
830  label endIndex = cellCells.offsets()[cellI+1];
831 
832  for (label i = startIndex; i < endIndex; i++)
833  {
834  if (nbrCells.insert(cellCells.m()[i]))
835  {
836  cellCells.m()[newIndex] = cellCells.m()[i];
837  cellCellWeights.m()[newIndex] = cellCellWeights.m()[i];
838  newIndex++;
839  }
840  }
841  startIndex = endIndex;
842  cellCells.offsets()[cellI+1] = newIndex;
843  cellCellWeights.offsets()[cellI+1] = newIndex;
844  }
845 
846  cellCells.setSize(cellCells.size(), newIndex);
847  cellCellWeights.setSize(cellCells.size(), newIndex);
848 }
849 
850 
// Decompose the mesh subject to explicit constraints: cells joined by
// unblocked faces or explicit (baffle) connections are kept on one
// processor, and whole face-set regions can be pinned to a processor.
// When no constraints are active this reduces to a plain (optionally
// weighted) cell-centre decomposition; otherwise the mesh is split into
// regions with regionSplit, the regions are decomposed, and the result is
// post-processed (baffles, parallel FaceCellWave sweep, face sets).
// NOTE(review): the extraction dropped the function name line, two
// FatalErrorInFunction lines, the List<minData> faceData declaration, a
// FaceCellWave iteration-count argument, and a patches declaration +
// forAll header in the debug check — confirm against the repository.
852 (
853  const polyMesh& mesh,
854  const scalarField& cellWeights,
855  const boolList& blockedFace,
856  const PtrList<labelList>& specifiedProcessorFaces,
857  const labelList& specifiedProcessor,
858  const List<labelPair>& explicitConnections
859 )
860 {
861  // Any weights specified?
862  const bool hasWeights =
863  returnReduce(cellWeights.size(), sumOp<label>()) > 0;
864 
865  // Any processor sets?
866  label nProcSets = 0;
867  forAll(specifiedProcessorFaces, setI)
868  {
869  nProcSets += specifiedProcessorFaces[setI].size();
870  }
871  reduce(nProcSets, sumOp<label>());
872 
873  // Any non-mesh connections?
874  label nConnections = returnReduce
875  (
876  explicitConnections.size(),
877  sumOp<label>()
878  );
879 
880  // Any faces not blocked?
881  label nUnblocked = 0;
882  forAll(blockedFace, facei)
883  {
884  if (!blockedFace[facei])
885  {
886  nUnblocked++;
887  }
888  }
889  reduce(nUnblocked, sumOp<label>());
890 
891 
892  // Either do decomposition on cell centres or on agglomeration
893 
894  labelList finalDecomp;
895 
896 
897  if (nProcSets+nConnections+nUnblocked == 0)
898  {
899  // No constraints, possibly weights
900 
901  if (hasWeights)
902  {
903  finalDecomp = decompose
904  (
905  mesh,
906  mesh.cellCentres(),
907  cellWeights
908  );
909  }
910  else
911  {
912  finalDecomp = decompose(mesh, mesh.cellCentres());
913  }
914  }
915  else
916  {
917  if (debug)
918  {
919  Info<< "Constrained decomposition:" << endl
920  << " faces with same owner and neighbour processor : "
921  << nUnblocked << endl
922  << " baffle faces with same owner processor : "
923  << nConnections << endl
924  << " faces all on same processor : "
925  << nProcSets << endl << endl;
926  }
927 
928  // Determine local regions, separated by blockedFaces
929  regionSplit localRegion(mesh, blockedFace, explicitConnections, false);
930 
931 
932  if (debug)
933  {
934  Info<< "Constrained decomposition:" << endl
935  << " split into " << localRegion.nLocalRegions()
936  << " regions."
937  << endl;
938  }
939 
940  // Determine region cell centres
941  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
942 
943  // This just takes the first cell in the region. Otherwise the problem
944  // is with cyclics - if we'd average the region centre might be
945  // somewhere in the middle of the domain which might not be anywhere
946  // near any of the cells.
947 
948  pointField regionCentres(localRegion.nLocalRegions(), point::max);
949 
950  forAll(localRegion, celli)
951  {
952  label regionI = localRegion[celli];
953 
// point::max marks "not yet set" — take the first cell seen per region
954  if (regionCentres[regionI] == point::max)
955  {
956  regionCentres[regionI] = mesh.cellCentres()[celli];
957  }
958  }
959 
960  // Do decomposition on agglomeration
961  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
962 
963  scalarField regionWeights(localRegion.nLocalRegions(), 0);
964 
965  if (hasWeights)
966  {
967  forAll(localRegion, celli)
968  {
969  const label regionI = localRegion[celli];
970 
971  regionWeights[regionI] += cellWeights[celli];
972  }
973  }
974  else
975  {
976  forAll(localRegion, celli)
977  {
978  label regionI = localRegion[celli];
979 
980  regionWeights[regionI] += 1.0;
981  }
982  }
983 
984  finalDecomp = decompose
985  (
986  mesh,
987  localRegion,
988  regionCentres,
989  regionWeights
990  );
991 
992 
993 
994  // Implement the explicitConnections since above decompose
995  // does not know about them
996  forAll(explicitConnections, i)
997  {
998  const labelPair& baffle = explicitConnections[i];
999  label f0 = baffle.first();
1000  label f1 = baffle.second();
1001 
1002  if (!blockedFace[f0] && !blockedFace[f1])
1003  {
1004  // Note: what if internal faces and owner and neighbour on
1005  // different processor? So for now just push owner side
1006  // proc
1007 
1008  const label proci = finalDecomp[mesh.faceOwner()[f0]];
1009 
1010  finalDecomp[mesh.faceOwner()[f1]] = proci;
1011  if (mesh.isInternalFace(f1))
1012  {
1013  finalDecomp[mesh.faceNeighbour()[f1]] = proci;
1014  }
1015  }
1016  else if (blockedFace[f0] != blockedFace[f1])
1017  {
1019  << "On explicit connection between faces " << f0
1020  << " and " << f1
1021  << " the two blockedFace status are not equal : "
1022  << blockedFace[f0] << " and " << blockedFace[f1]
1023  << exit(FatalError);
1024  }
1025  }
1026 
1027 
1028  // blockedFaces corresponding to processor faces need to be handled
1029  // separately since not handled by local regionSplit. We need to
1030  // walk now across coupled faces and make sure to move a whole
1031  // global region across
1032  if (Pstream::parRun())
1033  {
1034  // Re-do regionSplit
1035 
1036  // Field on cells and faces.
1037  List<minData> cellData(mesh.nCells());
1039 
1040  // Take over blockedFaces by seeding a negative number
1041  // (so is always less than the decomposition)
1042  label nUnblocked = 0;
1043  forAll(blockedFace, facei)
1044  {
1045  if (blockedFace[facei])
1046  {
1047  faceData[facei] = minData(-123);
1048  }
1049  else
1050  {
1051  nUnblocked++;
1052  }
1053  }
1054 
1055  // Seed unblocked faces with destination processor
1056  labelList seedFaces(nUnblocked);
1057  List<minData> seedData(nUnblocked);
1058  nUnblocked = 0;
1059 
1060  forAll(blockedFace, facei)
1061  {
1062  if (!blockedFace[facei])
1063  {
1064  label own = mesh.faceOwner()[facei];
1065  seedFaces[nUnblocked] = facei;
1066  seedData[nUnblocked] = minData(finalDecomp[own]);
1067  nUnblocked++;
1068  }
1069  }
1070 
1071 
1072  // Propagate information inwards
1073  FaceCellWave<minData> deltaCalc
1074  (
1075  mesh,
1076  seedFaces,
1077  seedData,
1078  faceData,
1079  cellData,
1081  );
1082 
1083  // And extract
1084  forAll(finalDecomp, celli)
1085  {
1086  if (cellData[celli].valid(deltaCalc.data()))
1087  {
1088  finalDecomp[celli] = cellData[celli].data();
1089  }
1090  }
1091  }
1092 
1093 
1094  // For specifiedProcessorFaces rework the cellToProc to enforce
1095  // all on one processor since we can't guarantee that the input
1096  // to regionSplit was a single region.
1097  // E.g. faceSet 'a' with the cells split into two regions
1098  // by a notch formed by two walls
1099  //
1100  // \ /
1101  // \ /
1102  // ---a----+-----a-----
1103  //
1104  //
1105  // Note that reworking the cellToProc might make the decomposition
1106  // unbalanced.
1107  forAll(specifiedProcessorFaces, setI)
1108  {
1109  const labelList& set = specifiedProcessorFaces[setI];
1110 
1111  label proci = specifiedProcessor[setI];
1112  if (proci == -1)
1113  {
1114  // If no processor specified use the one from the
1115  // 0th element
1116  proci = finalDecomp[mesh.faceOwner()[set[0]]];
1117  }
1118 
1119  forAll(set, fI)
1120  {
1121  const face& f = mesh.faces()[set[fI]];
1122  forAll(f, fp)
1123  {
1124  const labelList& pFaces = mesh.pointFaces()[f[fp]];
1125  forAll(pFaces, i)
1126  {
1127  label facei = pFaces[i];
1128 
1129  finalDecomp[mesh.faceOwner()[facei]] = proci;
1130  if (mesh.isInternalFace(facei))
1131  {
1132  finalDecomp[mesh.faceNeighbour()[facei]] = proci;
1133  }
1134  }
1135  }
1136  }
1137  }
1138 
1139 
// Debug-only consistency check: unblocked coupled faces must have both
// sides on the same processor
1140  if (debug && Pstream::parRun())
1141  {
1142  labelList nbrDecomp;
1143  syncTools::swapBoundaryCellList(mesh, finalDecomp, nbrDecomp);
1144 
1147  {
1148  const polyPatch& pp = patches[patchi];
1149  if (pp.coupled())
1150  {
1151  forAll(pp, i)
1152  {
1153  label facei = pp.start()+i;
1154  label own = mesh.faceOwner()[facei];
1155  label bFacei = facei-mesh.nInternalFaces();
1156 
1157  if (!blockedFace[facei])
1158  {
1159  label ownProc = finalDecomp[own];
1160  label nbrProc = nbrDecomp[bFacei];
1161  if (ownProc != nbrProc)
1162  {
1164  << "patch:" << pp.name()
1165  << " face:" << facei
1166  << " at:" << mesh.faceCentres()[facei]
1167  << " ownProc:" << ownProc
1168  << " nbrProc:" << nbrProc
1169  << exit(FatalError);
1170  }
1171  }
1172  }
1173  }
1174  }
1175  }
1176  }
1177 
1178  return finalDecomp;
1179 }
1180 
1181 
// Gather the constraint data from all registered constraints: start from
// "everything blocked / no sets / no connections" and let each constraint
// add its restrictions.
// NOTE(review): function name line dropped by the extraction.
1183 (
1184  const polyMesh& mesh,
1185  boolList& blockedFace,
1186  PtrList<labelList>& specifiedProcessorFaces,
1187  labelList& specifiedProcessor,
1188  List<labelPair>& explicitConnections
1189 )
1190 {
// Default: all faces blocked (owner and neighbour may go to different procs)
1191  blockedFace.setSize(mesh.nFaces());
1192  blockedFace = true;
1193 
1194  specifiedProcessorFaces.clear();
1195  explicitConnections.clear();
1196 
1197  forAll(constraints_, constraintI)
1198  {
1199  constraints_[constraintI].add
1200  (
1201  mesh,
1202  blockedFace,
1203  specifiedProcessorFaces,
1204  specifiedProcessor,
1205  explicitConnections
1206  );
1207  }
1208 }
1209 
1210 
// Give every registered constraint the opportunity to modify an existing
// decomposition (in-place on 'decomposition').
// NOTE(review): function name line dropped by the extraction.
1212 (
1213  const polyMesh& mesh,
1214  const boolList& blockedFace,
1215  const PtrList<labelList>& specifiedProcessorFaces,
1216  const labelList& specifiedProcessor,
1217  const List<labelPair>& explicitConnections,
1218  labelList& decomposition
1219 )
1220 {
1221  forAll(constraints_, constraintI)
1222  {
1223  constraints_[constraintI].apply
1224  (
1225  mesh,
1226  blockedFace,
1227  specifiedProcessorFaces,
1228  specifiedProcessor,
1229  explicitConnections,
1230  decomposition
1231  );
1232  }
1233 }
1234 
1235 
// Top-level entry point: collect constraints from all registered
// decompositionConstraints, run the constrained decomposition, then let
// the constraints post-process the result.
// NOTE(review): function name line dropped by the extraction.
1237 (
1238  const polyMesh& mesh,
1239  const scalarField& cellWeights
1240 )
1241 {
1242  // Collect all constraints
1243 
1244  boolList blockedFace;
1245  PtrList<labelList> specifiedProcessorFaces;
1246  labelList specifiedProcessor;
1247  List<labelPair> explicitConnections;
1248  setConstraints
1249  (
1250  mesh,
1251  blockedFace,
1252  specifiedProcessorFaces,
1253  specifiedProcessor,
1254  explicitConnections
1255  );
1256 
1257 
1258  // Construct decomposition method and either do decomposition on
1259  // cell centres or on agglomeration
1260 
1261  labelList finalDecomp = decompose
1262  (
1263  mesh,
1264  cellWeights, // optional weights
1265  blockedFace, // any cells to be combined
1266  specifiedProcessorFaces,// any whole cluster of cells to be kept
1267  specifiedProcessor,
1268  explicitConnections // baffles
1269  );
1270 
1271 
1272  // Give any constraint the option of modifying the decomposition
1273 
1274  applyConstraints
1275  (
1276  mesh,
1277  blockedFace,
1278  specifiedProcessorFaces,
1279  specifiedProcessor,
1280  explicitConnections,
1281  finalDecomp
1282  );
1283 
1284  return finalDecomp;
1285 }
1286 
1287 
1288 // ************************************************************************* //
#define forAll(list, i)
Loop across all elements in list.
Definition: UList.H:433
#define forAllConstIter(Container, container, iter)
Iterate across all elements in the container object of type.
Definition: UList.H:476
void setSize(const label mRows)
Reset size of CompactListList.
Wave propagation of information through grid. Every iteration information goes through one layer of c...
Definition: FaceCellWave.H:79
const TrackingData & data() const
Additional data to be passed into container.
Definition: FaceCellWave.H:308
bool insert(const Key &key)
Insert a new entry.
Definition: HashSet.H:109
void clear()
Clear all entries from table.
Definition: HashTable.C:542
IOdictionary is derived from dictionary and IOobject to give the dictionary automatic IO functionalit...
Definition: IOdictionary.H:57
IOobject defines the attributes of an object for which implicit objectRegistry management is supporte...
Definition: IOobject.H:99
void append(const T &)
Append an element at the end of the list.
Definition: ListI.H:178
void size(const label)
Override size to be inconsistent with allocated storage.
Definition: ListI.H:164
void clear()
Clear the list, i.e. set size to zero.
Definition: ListI.H:125
void setSize(const label)
Reset size of List.
Definition: List.C:281
const Type & second() const
Return second.
Definition: PairI.H:121
const Type & first() const
Return first.
Definition: PairI.H:107
A templated 1D list of pointers to objects of type <T>, where the size of the array is known and used...
Definition: PtrList.H:75
void clear()
Clear the PtrList, i.e. set size to zero deleting all the.
Definition: PtrList.C:198
static const word & system()
Return system name.
Definition: TimePaths.H:112
Class to control time during OpenFOAM simulations that is also the top-level objectRegistry.
Definition: Time.H:76
List< Container > list() const
Convert to List<Container>
const UList< label > & offsets() const
Return the offset table (= size()+1)
const UList< T > & m() const
Return the packed matrix of data.
label size() const
Return the primary size, i.e. the number of rows.
T * data()
Return a pointer to the first data element,.
Definition: UListI.H:149
T & last()
Return the last element of the list.
Definition: UListI.H:128
static label worldComm
Default communicator (all processors)
Definition: UPstream.H:278
static bool & parRun()
Is this a parallel run?
Definition: UPstream.H:399
static int & msgType()
Message tag of standard messages.
Definition: UPstream.H:476
label size() const
Return the number of elements in the UPtrList.
Definition: UPtrListI.H:29
static const Form max
Definition: VectorSpace.H:120
static autoPtr< decompositionConstraint > New(const dictionary &constraintsDict, const word &type)
Return a reference to the selected decompositionConstraint.
Abstract base class for decomposition.
decompositionMethod(const dictionary &decompositionDict)
Construct given the decomposition dictionary.
static IOdictionary decomposeParDict(const Time &time)
Read and return the decomposeParDict.
PtrList< decompositionConstraint > constraints_
Optional constraints.
label nWeights(const pointField &points, const scalarField &pointWeights) const
Return the number of weights per point.
virtual labelList decompose(const pointField &points, const scalarField &pointWeights)
Return for every coordinate the wanted processor number.
void setConstraints(const polyMesh &mesh, boolList &blockedFace, PtrList< labelList > &specifiedProcessorFaces, labelList &specifiedProcessor, List< labelPair > &explicitConnections)
Helper: extract constraints:
label checkWeights(const pointField &points, const scalarField &pointWeights) const
Check the weights against the points.
static void calcCellCells(const polyMesh &mesh, const labelList &agglom, const label nLocalCoarse, const bool global, CompactListList< label > &cellCells)
Helper: determine (local or global) cellCells from mesh.
static autoPtr< decompositionMethod > NewDistributor(const dictionary &decompositionDict)
Return a reference to the selected decomposition method.
static labelList scaleWeights(const scalarField &weights, label &nWeights, const bool distributed=true)
Convert the given scalar weights to labels.
void applyConstraints(const polyMesh &mesh, const boolList &blockedFace, const PtrList< labelList > &specifiedProcessorFaces, const labelList &specifiedProcessor, const List< labelPair > &explicitConnections, labelList &finalDecomp)
Helper: apply constraints to a decomposition. This gives.
static autoPtr< decompositionMethod > NewDecomposer(const dictionary &decompositionDict)
Return a reference to the selected decomposition method.
A list of keywords followed by any number of values (e.g. words and numbers) or sub-dictionaries.
Definition: dictionary.H:162
const dictionary & subOrEmptyDict(const word &, const bool mustRead=false) const
Find and return a sub-dictionary.
Definition: dictionary.C:900
const dictionary & subDict(const word &) const
Find and return a sub-dictionary.
Definition: dictionary.C:849
ITstream & lookupBackwardsCompatible(const wordList &, bool recursive=false, bool patternMatch=true) const
Find and return an entry data stream, trying a list of keywords.
Definition: dictionary.C:751
bool found(const word &, bool recursive=false, bool patternMatch=true) const
Search dictionary for given keyword.
Definition: dictionary.C:539
bool open(const fileName &libName, const bool verbose=true)
Open the named library, optionally with warnings if problems occur.
Base class for classes which manage incomplete sets of face data.
A face is a list of labels corresponding to mesh vertices.
Definition: face.H:76
Calculates a unique integer (label so might not have enough room - 2G max) for processor + local inde...
Definition: globalIndex.H:64
label toGlobal(const label i) const
From local to global.
Definition: globalIndexI.H:82
label toLocal(const label i) const
From global to local on current processor.
Definition: globalIndexI.H:117
bool isLocal(const label i) const
Is on local processor.
Definition: globalIndexI.H:95
label nTotalCells() const
Return total number of cells in decomposed mesh.
For use with FaceCellWave. Transports minimum passive data.
Definition: minData.H:64
const word & name() const
Return name.
Foam::polyBoundaryMesh.
Mesh consisting of general polyhedral cells.
Definition: polyMesh.H:80
virtual const faceList & faces() const
Return raw faces.
Definition: polyMesh.C:1344
virtual const labelList & faceOwner() const
Return face owner.
Definition: polyMesh.C:1357
const globalMeshData & globalData() const
Return parallel info.
Definition: polyMesh.C:1521
const polyBoundaryMesh & boundaryMesh() const
Return boundary mesh.
Definition: polyMesh.H:401
virtual const labelList & faceNeighbour() const
Return face neighbour.
Definition: polyMesh.C:1363
A patch is a list of labels that address the faces in the global face list.
Definition: polyPatch.H:70
virtual bool coupled() const
Return true if this patch is geometrically coupled (i.e. faces and.
Definition: polyPatch.H:290
label start() const
Return start label of this patch in the polyMesh face list.
Definition: polyPatch.H:280
const vectorField & faceCentres() const
label nInternalFaces() const
const vectorField & cellCentres() const
label nCells() const
const scalarField & magFaceAreas() const
const labelListList & pointFaces() const
bool isInternalFace(const label faceIndex) const
Return true if given face label is internal to the mesh.
label nFaces() const
This class separates the mesh into distinct unconnected regions, each of which is then given a label ...
Definition: regionSplit.H:118
label nLocalRegions() const
Return local number of regions.
Definition: regionSplit.H:202
static void swapBoundaryFaceList(const polyMesh &mesh, UList< T > &l)
Swap coupled boundary face values.
Definition: syncTools.H:436
static void swapBoundaryCellList(const polyMesh &mesh, const UList< T > &cellData, List< T > &neighbourCellData)
Swap to obtain neighbour cell values for all boundary faces.
A class for handling words, derived from string.
Definition: word.H:62
Foam::fvMesh mesh(Foam::IOobject(regionName, runTime.name(), runTime, Foam::IOobject::MUST_READ), false)
#define FatalErrorInFunction
Report an error message using Foam::FatalError.
Definition: error.H:334
label patchi
const pointField & points
const fvPatchList & patches
bool valid(const PtrList< ModelType > &l)
Namespace for OpenFOAM.
errorManipArg< error, int > exit(error &err, const int errNo=1)
Definition: errorManip.H:124
dlLibraryTable libs
Table of loaded dynamic libraries.
Type gSum(const FieldField< Field, Type > &f)
Ostream & decrIndent(Ostream &os)
Decrement the indent level.
Definition: Ostream.H:242
defineRunTimeSelectionTable(reactionRateFlameArea, dictionary)
intWM_LABEL_SIZE_t label
A label is an int32_t or int64_t as specified by the pre-processor macro WM_LABEL_SIZE.
Definition: label.H:59
Ostream & endl(Ostream &os)
Add newline and flush stream.
Definition: Ostream.H:258
messageStream Info
Ostream & incrIndent(Ostream &os)
Increment the indent level.
Definition: Ostream.H:235
void reduce(const List< UPstream::commsStruct > &comms, T &Value, const BinaryOp &bop, const int tag, const label comm)
T returnReduce(const T &Value, const BinaryOp &bop, const int tag=Pstream::msgType(), const label comm=UPstream::worldComm)
defineTypeNameAndDebug(combustionModel, 0)
dimensioned< Type > sum(const DimensionedField< Type, GeoMesh, PrimitiveField > &df)
error FatalError
static const char nl
Definition: Ostream.H:267
fileType type(const fileName &, const bool checkVariants=true, const bool followLink=true)
Return the file type: directory or file.
Definition: POSIX.C:488
labelList f(nPoints)
Info<< "Finished reading KIVA file"<< endl;cellShapeList cellShapes(nPoints);labelList cellZoning(nPoints, -1);const cellModel &hex=*(cellModeller::lookup("hex"));labelList hexLabels(8);label activeCells=0;labelList pointMap(nPoints);forAll(pointMap, i){ pointMap[i]=i;}for(label i=0;i< nPoints;i++){ if(f[i] > 0.0) { hexLabels[0]=i;hexLabels[1]=i1tab[i];hexLabels[2]=i3tab[i1tab[i]];hexLabels[3]=i3tab[i];hexLabels[4]=i8tab[i];hexLabels[5]=i1tab[i8tab[i]];hexLabels[6]=i3tab[i1tab[i8tab[i]]];hexLabels[7]=i3tab[i8tab[i]];cellShapes[activeCells]=cellShape(hex, hexLabels);edgeList edges=cellShapes[activeCells].edges();forAll(edges, ei) { if(edges[ei].mag(points)< small) { label start=pointMap[edges[ei].start()];while(start !=pointMap[start]) { start=pointMap[start];} label end=pointMap[edges[ei].end()];while(end !=pointMap[end]) { end=pointMap[end];} label minLabel=min(start, end);pointMap[start]=pointMap[end]=minLabel;} } cellZoning[activeCells]=idreg[i];activeCells++;}}cellShapes.setSize(activeCells);cellZoning.setSize(activeCells);forAll(cellShapes, celli){ cellShape &cs=cellShapes[celli];forAll(cs, i) { cs[i]=pointMap[cs[i]];} cs.collapse();}label bcIDs[11]={-1, 0, 2, 4, -1, 5, -1, 6, 7, 8, 9};const label nBCs=12;const word *kivaPatchTypes[nBCs]={ &wallPolyPatch::typeName, &wallPolyPatch::typeName, &wallPolyPatch::typeName, &wallPolyPatch::typeName, &symmetryPolyPatch::typeName, &wedgePolyPatch::typeName, &polyPatch::typeName, &polyPatch::typeName, &polyPatch::typeName, &polyPatch::typeName, &symmetryPolyPatch::typeName, &mergedCyclicPolyPatch::typeName};enum patchTypeNames{ PISTON, VALVE, LINER, CYLINDERHEAD, AXIS, WEDGE, INFLOW, OUTFLOW, PRESIN, PRESOUT, SYMMETRYPLANE, CYCLIC};const char *kivaPatchNames[nBCs]={ "piston", "valve", "liner", "cylinderHead", "axis", "wedge", "inflow", "outflow", "presin", "presout", "symmetryPlane", "cyclic"};List< SLList< face > > pFaces[nBCs]
Definition: readKivaGrid.H:229
dictionary dict
Operator to apply a binary operation to a pair of lists.
Definition: ListOps.H:288