// NOTE(review): garbled extraction — stray original-file line numbers ("65",
// "67", ...) are fused into the code and the enclosing function's signature
// and braces lie outside this chunk. Code left byte-identical; comments only.
//
// Restores optimiser state from the method IO dictionary, but only when the
// dictionary header was read successfully (i.e. a state file from a previous
// run exists).
65 if (optMethodIODict_.headerOk())
// Mandatory entries once the header is OK: previous steepest-descent step
// (dxOld), previous conjugate search direction (sOld), the iteration
// counter and the step length eta. readEntry fails hard if one is missing.
67 optMethodIODict_.readEntry(
"dxOld", dxOld_);
68 optMethodIODict_.readEntry(
"sOld", sOld_);
69 optMethodIODict_.readEntry(
"counter", counter_);
70 optMethodIODict_.readEntry(
"eta", eta_);
// Number of design variables stored alongside the previous state.
72 label nDVs = optMethodIODict_.get<label>(
"nDVs");
// If no active design variables were defined explicitly, presumably all
// nDVs of them become active — the branch body is outside this view;
// TODO(review): confirm against the full source.
75 if (activeDesignVars_.empty())
// Constructor fragment. NOTE(review): the parameter list, initializer list
// and most of the body are elided by the extraction; only isolated wrapped
// lines survive. Code left byte-identical; comments only.
85 Foam::conjugateGradient::conjugateGradient
// Construction-from-dictionary signature; other parameters (if any) are
// outside this view — TODO(review): confirm full signature.
88 const dictionary&
dict
// betaType defaults to Fletcher-Reeves when absent from the coeffs dict.
99 coeffsDict().getOrDefault<word>(
"betaType",
"FletcherReeves")
// Informational fallback: no explicit list of active design variables was
// found, so all available ones are treated as active.
109 Info<<
"\t Did not find explicit definition of active design variables. "
110 <<
"Treating all available ones as active " <<
endl;
// Validation of betaType; the first two comparison clauses of the compound
// condition are elided from this view.
118 && !(
betaType_ ==
"PolakRibiereRestarted")
// Error-message body of what is presumably a FatalError block (its opening
// statement is elided) listing the three valid betaType options.
122 <<
"Invalid betaType " <<
betaType_ <<
". Valid options are "
123 <<
"FletcherReeves, PolakRibiere, PolakRibiereRestarted"
// Fragment of the correction computation. NOTE(review): function signature,
// braces and several interior lines are elided by the extraction. Code left
// byte-identical; comments only.
//
// First iteration: no previous direction exists, so plain steepest descent
// is used and the (negative) gradient is stored for the next CG update.
141 Info<<
"Using steepest descent for the first iteration" <<
endl;
142 correction_ = -eta_*objectiveDerivatives_;
// Cache the restriction of -grad onto the active design variables only.
144 dxOld_.map(-objectiveDerivatives_, activeDesignVars_);
// Subsequent iterations: current steepest-descent step on the active
// design variables, then the conjugacy factor beta per the chosen scheme.
150 dx.map(-objectiveDerivatives_, activeDesignVars_);
// Fletcher-Reeves: beta = (g.g)/(gOld.gOld).
153 if (betaType_ ==
"FletcherReeves")
155 beta = globalSum(dx*dx)/globalSum(dxOld_ * dxOld_);
// Polak-Ribiere: beta = g.(g - gOld)/(gOld.gOld).
157 else if (betaType_ ==
"PolakRibiere")
159 beta = globalSum(dx*(dx - dxOld_))/globalSum(dxOld_ * dxOld_);
// Restarted Polak-Ribiere: same ratio, presumably clamped at zero below
// (the clamping expression itself is partly elided — TODO confirm).
161 else if (betaType_ ==
"PolakRibiereRestarted")
167 globalSum(dx*(dx - dxOld_))/globalSum(dxOld_ * dxOld_)
// A zero beta here signals the clamp fired, i.e. the raw Polak-Ribiere
// value was negative and the CG direction was restarted.
169 if (
beta == scalar(0))
171 Info<<
"Computed negative beta. Resetting to zero" <<
endl;
// Scatter the active-variable search direction s back into the full
// correction field, scaled by the step length eta.
178 forAll(activeDesignVars_, varI)
180 correction_[activeDesignVars_[varI]] = eta_*
s[varI];
// Fragment of an old-correction update routine (signature elided by the
// extraction). Stores the externally supplied correction: its restriction
// to the active design variables becomes the old search direction, and the
// full field becomes the current correction. Code byte-identical.
197 sOld_.map(oldCorrection, activeDesignVars_);
199 correction_ = oldCorrection;
// Fragment of the state-persistence routine (signature elided). Writes the
// CG state back into the method IO dictionary so a later run can restore it
// (mirrors the readEntry calls earlier in the file). The trailing 'true'
// overwrites any existing entry. Code byte-identical; comments only.
205 optMethodIODict_.add<
scalarField>(
"dxOld", dxOld_,
true);
206 optMethodIODict_.add<
scalarField>(
"sOld", sOld_,
true);
207 optMethodIODict_.add<label>(
"counter", counter_,
true);
// nDVs is persisted from the size of the full derivative field, matching
// the nDVs entry consumed on restart.
208 optMethodIODict_.add<label>(
"nDVs", objectiveDerivatives_.size(),
true);