ENH: added a general framework for normalization and setting targets
for all objective functions.

- The normalization is useful for practically all update methods dealing
  with constraints (e.g. SQP, MMA). The normalization factor can either be
  given explicitly or, if not given, is set to the value of the objective
  function in the first optimisation cycle.
- The target value is useful when the objective is used as a constraint in
  constrained optimisation problems (e.g. drag - dragTarget). It should
  only be used with update methods that understand the value of the
  constraint (e.g. SQP, MMA), not when the objective at hand is the only
  objective of the optimisation problem. In such a case, a squared
  objective should be used instead (e.g. sqr(drag - dragTarget)). A usage
  sketch is given after this list.
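
A minimal sketch of how these entries might look inside an objective
sub-dictionary follows. The keywords "normalize", "normFactor" and "target"
are the ones read by this commit; the surrounding layout (objective name,
"weight") is assumed for illustration and depends on the case set-up:

    drag
    {
        weight          1.0;     // assumed entry, for illustration only
        // Divide the objective value and its derivatives by normFactor
        normalize       true;
        // Optional; if omitted, the objective value of the first
        // optimisation cycle is used as the normalization factor
        normFactor      1.5;
        // Used when the objective acts as a constraint (e.g. with SQP, MMA)
        target          0.01;
    }

With these entries, JCycle() returns (J - target)/normFactor, which is the
constraint value seen by the update method and the line search.
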
Vaggelis Papoutsis
2020-05-28 11:43:09 +03:00
committed by Andrew Heather
parent 4d67819a2c
commit 6ee7bc66c5
7 changed files with 186 additions and 5 deletions

View File

@@ -151,13 +151,17 @@ bool objectiveManager::readDict(const dictionary& dict)

    return true;
}


void objectiveManager::updateNormalizationFactor()
{
    // Update normalization factors for all objectives
    for (objective& obj : objectives_)
    {
        if (obj.normalize())
        {
            obj.updateNormalizationFactor();
        }
    }
}

View File

@@ -128,6 +128,67 @@ autoPtr<objectiveIncompressible> objectiveIncompressible::New

// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

void objectiveIncompressible::doNormalization()
{
    if (normalize_ && normFactor_.valid())
    {
        const scalar oneOverNorm(1./normFactor_());

        if (hasdJdv())
        {
            dJdvPtr_().primitiveFieldRef() *= oneOverNorm;
        }
        if (hasdJdp())
        {
            dJdpPtr_().primitiveFieldRef() *= oneOverNorm;
        }
        if (hasdJdT())
        {
            dJdTPtr_().primitiveFieldRef() *= oneOverNorm;
        }
        if (hasdJdTMVar1())
        {
            dJdTMvar1Ptr_().primitiveFieldRef() *= oneOverNorm;
        }
        if (hasdJdTMVar2())
        {
            dJdTMvar2Ptr_().primitiveFieldRef() *= oneOverNorm;
        }
        if (hasBoundarydJdv())
        {
            bdJdvPtr_() *= oneOverNorm;
        }
        if (hasBoundarydJdvn())
        {
            bdJdvnPtr_() *= oneOverNorm;
        }
        if (hasBoundarydJdvt())
        {
            bdJdvtPtr_() *= oneOverNorm;
        }
        if (hasBoundarydJdp())
        {
            bdJdpPtr_() *= oneOverNorm;
        }
        if (hasBoundarydJdT())
        {
            bdJdTPtr_() *= oneOverNorm;
        }
        if (hasBoundarydJdTMVar1())
        {
            bdJdTMvar1Ptr_() *= oneOverNorm;
        }
        if (hasBoundarydJdTMVar2())
        {
            bdJdTMvar2Ptr_() *= oneOverNorm;
        }

        // Normalize geometric fields
        objective::doNormalization();
    }
}
const volVectorField& objectiveIncompressible::dJdv()
{
    if (dJdvPtr_.empty())

@@ -418,6 +479,9 @@ void objectiveIncompressible::update()

    update_dxdbDirectMultiplier();
    update_boundaryEdgeContribution();
    update_dJdStressMultiplier();

    // Divide everything with normalization factor
    doNormalization();
}

View File

@@ -165,6 +165,9 @@ public:

        //- Return the objective function value
        virtual scalar J() = 0;

        //- Normalize all fields allocated by the objective
        virtual void doNormalization();

        //- Contribution to field adjoint momentum eqs
        const volVectorField& dJdv();

View File

@@ -247,6 +247,7 @@ void objectivePtLosses::write() const

    setObjectiveFilePtr();
    objFunctionFilePtr_() << setw(4) << "#" << " ";
    objFunctionFilePtr_() << setw(width) << "ptLosses" << " ";
    objFunctionFilePtr_() << setw(width) << "JCycle" << " ";
    forAll(patches_, oI)
    {
        label patchI = patches_[oI];

@@ -258,6 +259,7 @@ void objectivePtLosses::write() const

    objFunctionFilePtr_() << setw(4) << mesh_.time().value() << " ";
    objFunctionFilePtr_() << setw(width) << J_ << " ";
    objFunctionFilePtr_() << setw(width) << JCycle() << " ";
    forAll(patchPt_, pI)
    {
        objFunctionFilePtr_() << setw(width) << patchPt_[pI] << " ";

View File

@@ -59,8 +59,10 @@ class objectivePtLosses
{
    // Private data

        //- Patches on which to compute losses
        labelList patches_;

        //- Total pressure per patch on patches_
        scalarField patchPt_;

View File

@@ -129,10 +129,13 @@ objective::objective

    objectiveName_(dict.dictName()),
    computeMeanFields_(false), // is reset in derived classes
    nullified_(false),
    normalize_(dict.getOrDefault<bool>("normalize", false)),
    J_(Zero),
    JMean_(this->getOrDefault<scalar>("JMean", Zero)),
    weight_(Zero),
    normFactor_(nullptr),
    target_(dict.getOrDefault<scalar>("target", Zero)),
    integrationStartTimePtr_(nullptr),
    integrationEndTimePtr_(nullptr),

@@ -174,6 +177,20 @@ objective::objective

            new scalar(dict.get<scalar>("integrationEndTime"))
        );
    }

    // Set normalization factor, if present
    if (normalize_)
    {
        scalar normFactor(Zero);
        if (dict.readIfPresent("normFactor", normFactor))
        {
            normFactor_.reset(new scalar(normFactor));
        }
        else if (this->readIfPresent("normFactor", normFactor))
        {
            normFactor_.reset(new scalar(normFactor));
        }
    }
}
@@ -234,13 +251,26 @@ scalar objective::JCycle() const

    {
        J = JMean_;
    }

    // Subtract target, in case the objective is used as a constraint
    J -= target_;

    // Normalize here, in order to get the correct value for line search
    if (normalize_ && normFactor_.valid())
    {
        J /= normFactor_();
    }

    return J;
}

void objective::updateNormalizationFactor()
{
    // If not already set, use the objective value of the current cycle
    if (normalize_ && normFactor_.empty())
    {
        normFactor_.reset(new scalar(JCycle()));
    }
}
@@ -289,6 +319,62 @@ scalar objective::weight() const

}


bool objective::normalize() const
{
    return normalize_;
}


void objective::doNormalization()
{
    if (normalize_ && normFactor_.valid())
    {
        const scalar oneOverNorm(1./normFactor_());

        if (hasdJdb())
        {
            dJdbPtr_().primitiveFieldRef() *= oneOverNorm;
        }
        if (hasBoundarydJdb())
        {
            bdJdbPtr_() *= oneOverNorm;
        }
        if (hasdSdbMult())
        {
            bdSdbMultPtr_() *= oneOverNorm;
        }
        if (hasdndbMult())
        {
            bdndbMultPtr_() *= oneOverNorm;
        }
        if (hasdxdbMult())
        {
            bdxdbMultPtr_() *= oneOverNorm;
        }
        if (hasdxdbDirectMult())
        {
            bdxdbDirectMultPtr_() *= oneOverNorm;
        }
        if (hasBoundaryEdgeContribution())
        {
            bEdgeContribution_() *= oneOverNorm;
        }
        if (hasDivDxDbMult())
        {
            divDxDbMultPtr_() *= oneOverNorm;
        }
        if (hasGradDxDbMult())
        {
            gradDxDbMultPtr_() *= oneOverNorm;
        }
        if (hasBoundarydJdStress())
        {
            bdJdStressPtr_() *= oneOverNorm;
        }
    }
}
bool objective::isWithinIntegrationTime() const
{
    if (hasIntegrationStartTime() && hasIntegrationEndTime())

@@ -667,6 +753,10 @@ void objective::writeMeanValue() const

bool objective::writeData(Ostream& os) const
{
    os.writeEntry("JMean", JMean_);

    if (normFactor_.valid())
    {
        os.writeEntry("normFactor", normFactor_());
    }

    return os.good();
}

View File

@@ -72,6 +72,7 @@ protected:

        const word objectiveName_;
        bool computeMeanFields_;
        bool nullified_;
        bool normalize_;

        //- Objective function value and weight
        scalar J_;
@@ -82,12 +83,21 @@ protected:

        //- Objective weight
        scalar weight_;

        //- Normalization factor
        autoPtr<scalar> normFactor_;

        //- Target value, in case the objective is used as a constraint.
        //  Should be used with caution and only with updateMethods that are
        //  affected by the target value without requiring a sqr (e.g. SQP, MMA)
        scalar target_;

        //- Objective integration start and end times (for unsteady flows)
        autoPtr<scalar> integrationStartTimePtr_;
        autoPtr<scalar> integrationEndTimePtr_;

        //- Contribution to field sensitivity derivatives
        //  Topology optimisation or other variants with
        //  as many design variables as the mesh cells
        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        autoPtr<volScalarField> dJdbPtr_;
@@ -248,6 +258,12 @@ public:

        //- Return the objective function weight
        scalar weight() const;

        //- Is the objective normalized
        bool normalize() const;

        //- Normalize all fields allocated by the objective
        virtual void doNormalization();

        //- Check whether this is an objective integration time
        bool isWithinIntegrationTime() const;