ENH: extend bitSet functionality

- num_blocks(), test_set() as per boost
- broadcast(), reduceAnd(), reduceOr() to simplify parallel operations
- matrix-like output for PackedList::writeList()
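
For orientation, a minimal usage sketch of the extended interface. The calls
mirror those exercised in the updated tests below; exampleUsage() is a
hypothetical wrapper, the test_set() semantics are taken from the
boost::dynamic_bitset analogue named above, and the reduceOr() arguments are
copied from the test code rather than a confirmed signature.

    // Sketch only -- not the definitive API
    #include "bitSet.H"
    #include "Pstream.H"
    #include "IOstreams.H"

    using namespace Foam;

    void exampleUsage()
    {
        const label myRank = UPstream::myProcNo();
        const label nProcs = UPstream::nProcs();

        bitSet used(nProcs);

        // test_set(): set the bit and return its previous value (as per boost)
        const bool wasSet = used.test_set(myRank);
        Info<< "previously set: " << wasSet << nl;

        // Parallel OR across all ranks, replacing a manual
        // reduce(data, num_blocks, bitOrOp<unsigned int>(), ...) over the raw blocks
        used.reduceOr(UPstream::worldComm, false);  // arguments as in the test below

        Info<< "storage blocks: " << used.num_blocks() << nl;

        // Matrix-like output, 10 entries per line
        used.writeList(Info, 10) << nl;
    }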

BUG: Pstream::broadcastList() missing resize on sub-ranks

- latent bug, since broadcastList() was not yet used anywhere in the OpenFOAM code base
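
The fix itself is not shown in the excerpts below. Conceptually, a list
broadcast has to communicate the size first and resize the list on the
receiving (sub-rank) side before the contents arrive. A generic sketch of
that pattern, using raw MPI rather than OpenFOAM's actual implementation:

    // Illustrative only -- not the Pstream::broadcastList() code
    #include <mpi.h>
    #include <vector>

    void broadcastList(std::vector<int>& list, MPI_Comm comm)
    {
        int rank = 0;
        MPI_Comm_rank(comm, &rank);

        // 1. Agree on the length (meaningful on the root only before the call)
        int n = static_cast<int>(list.size());
        MPI_Bcast(&n, 1, MPI_INT, 0, comm);

        // 2. Sub-ranks must resize before receiving the payload;
        //    skipping this step is the latent bug described above
        if (rank != 0)
        {
            list.resize(n);
        }

        // 3. Broadcast the contents from the root
        MPI_Bcast(list.data(), n, MPI_INT, 0, comm);
    }
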
Author: Mark Olesen
Date:   2025-08-28 14:33:18 +02:00
Commit: bd57627955 (parent 19caabbd56)

17 changed files with 433 additions and 117 deletions


@@ -1,3 +1,3 @@
-Test-PackedList.C
+Test-PackedList.cxx
 EXE = $(FOAM_USER_APPBIN)/Test-PackedList


@@ -142,6 +142,9 @@ int main(int argc, char *argv[])
         Info<< "got: " << bset1 << nl
             << "and: " << bset2 << nl
             << "and: " << bset3 << nl;
+        Info<< "==";
+        bset3.writeList(Info, 10) << nl;  // matrix-like output
     }
 }


@@ -137,11 +137,11 @@ inline bool compare
     const std::string& expected
 )
 {
-    const List<unsigned int>& store = bitset.storage();
+    const auto& store = bitset.storage();
     std::string has;
-    for (label blocki=0; blocki < bitset.nBlocks(); ++blocki)
+    for (label blocki=0; blocki < bitset.num_blocks(); ++blocki)
     {
         has += toString(store[blocki]);
     }


@@ -185,9 +185,9 @@ int main(int argc, char *argv[])
     }
     broadcast_chunks<labelList, label>(input1);
-    Pstream::maxCommsSize = 33;
+    UPstream::maxCommsSize = 33;
-    args.readIfPresent("comms-size", Pstream::maxCommsSize);
+    args.readIfPresent("comms-size", UPstream::maxCommsSize);
     broadcast_chunks<labelList, label>(input1);
@@ -197,11 +197,11 @@ int main(int argc, char *argv[])
         PstreamBuffers pBufs;
         labelList sendData;
-        if (Pstream::master())
+        if (UPstream::master())
         {
             sendData = identity(500);
-            for (const int proci : Pstream::subProcs())
+            for (const int proci : UPstream::subProcs())
             {
                 UOPstream os(proci, pBufs);
                 os << sendData;
@@ -211,7 +211,7 @@ int main(int argc, char *argv[])
         Info<< "call finishedSends()" << endl;
         pBufs.finishedScatters();
-        if (!Pstream::master())
+        if (UPstream::is_subrank())
         {
             UIPstream is(UPstream::masterNo(), pBufs);
             is >> sendData;
@@ -225,11 +225,11 @@ int main(int argc, char *argv[])
         labelListList recvBufs(UPstream::nProcs());
         labelList recvSizes;
-        if (Pstream::master())
+        if (UPstream::master())
         {
-            for (const int proci : Pstream::allProcs())
+            for (const int proci : UPstream::allProcs())
             {
-                if (proci != Pstream::myProcNo())
+                if (proci != UPstream::myProcNo())
                 {
                     sendBufs[proci] = identity(500);
                 }
@@ -253,11 +253,11 @@ int main(int argc, char *argv[])
         Map<labelList> recvBufs;
         Map<label> recvSizes;
-        if (Pstream::master())
+        if (UPstream::master())
         {
-            for (const int proci : Pstream::allProcs())
+            for (const int proci : UPstream::allProcs())
             {
-                if (proci != Pstream::myProcNo())
+                if (proci != UPstream::myProcNo())
                 {
                     sendBufs(proci) = identity(500);
                 }


@@ -110,21 +110,25 @@ int main(int argc, char *argv[])
         << " (self) reduced " << selfVal << nl;
     // Identical size on all procs
-    bitSet procUsed(nProcs);
-    if ((myRank % 4) == 0)
     {
-        procUsed.set(myRank);
+        bitSet localUsed(nProcs);
+        localUsed.set(myRank, ((myRank % 4) == 0));
+        Pout<< "local procUsed " << localUsed << nl;
+        localUsed.reduceOr(UPstream::worldComm, false);
+        Pout<< "reduce procUsed " << localUsed << nl;
     }
+    // With allGather
+    {
+        bitSet procUsed
+        (
+            bitSet::allGather((myRank % 4) == 0)
+        );
+        Pout<< "allGather: " << procUsed << nl;
+    }
-    Pout<< "local procUsed " << procUsed << nl;
-    reduce
-    (
-        procUsed.data(),
-        procUsed.size_data(),
-        bitOrOp<unsigned int>()
-    );
-    Pout<< "reduce procUsed " << procUsed << nl;
     // Identical size on all procs
     // encode as 0:empty, 1:uniform, 2:nonuniform, 3:mixed
@@ -147,12 +151,26 @@ int main(int argc, char *argv[])
         }
         Pout<< "local uniform " << uniformity << nl;
-        reduce
+        // reduce with op<..>()
+#if 1
+        Foam::reduce
         (
             uniformity.data(),
-            uniformity.size_data(),
-            bitOrOp<unsigned int>()
+            uniformity.num_blocks(),
+            bitOrOp<unsigned int>(),
+            UPstream::msgType(),  // ignored
+            UPstream::worldComm
         );
+#else
+        // Direct call to MPI_Allreduce
+        UPstream::mpiAllReduce
+        (
+            uniformity.data(),
+            uniformity.num_blocks(),
+            UPstream::opCodes::op_bit_or,
+            UPstream::worldComm
+        );
+#endif
         Pout<< "reduce uniform " << uniformity << nl;
     }
@@ -160,8 +178,8 @@ int main(int argc, char *argv[])
     {
         Pair<label> val
         (
-            Pstream::myProcNo(UPstream::commWorld()),
-            Pstream::myProcNo(UPstream::commWorld())
+            UPstream::myProcNo(UPstream::commWorld()),
+            UPstream::myProcNo(UPstream::commWorld())
         );
         Pair<label> worldVal = val;


@@ -79,7 +79,7 @@ int main(int argc, char *argv[])
     #include "setRootCase.H"
-    if (!Pstream::parRun())
+    if (!UPstream::parRun())
     {
         Info<< "\nWarning: not parallel - skipping further tests\n" << endl;
         return 0;
@@ -97,7 +97,7 @@ int main(int argc, char *argv[])
     DynamicList<MPI_Request> recvRequests(10);
-    if (!Pstream::master())
+    if (UPstream::is_subrank())
     {
         // Send some random length to master


@@ -76,7 +76,7 @@ int main(int argc, char *argv[])
     #include "setRootCase.H"
-    if (!Pstream::parRun())
+    if (!UPstream::parRun())
     {
         Info<< "\nWarning: not parallel - skipping further tests\n" << endl;
         return 0;
@@ -96,7 +96,7 @@ int main(int argc, char *argv[])
     // Map request indices to procs
     Map<label> recvFromProc(20);
-    if (!Pstream::master())
+    if (UPstream::is_subrank())
     {
         // Send some random length to master


@@ -52,7 +52,7 @@ int main(int argc, char *argv[])
     const bool optNonBlocking = args.found("non-blocking");
-    if (!Pstream::parRun())
+    if (!UPstream::parRun())
     {
         Info<< "\nWarning: not parallel - skipping further tests\n" << endl;
         return 0;
@@ -73,7 +73,7 @@ int main(int argc, char *argv[])
     DynamicList<UPstream::Request> sendRequests(10);
     DynamicList<UPstream::Request> recvRequests(10);
-    if (!Pstream::master())
+    if (UPstream::is_subrank())
     {
         // Send some random length to master