/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 1991-2009 OpenCFD Ltd.
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software; you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by the
    Free Software Foundation; either version 2 of the License, or (at your
    option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM; if not, write to the Free Software Foundation,
    Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

\*---------------------------------------------------------------------------*/

#include "genericFvPatchField.H"
#include "fvPatchFieldMapper.H"

// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

// Construct from patch and internal field.
// Deliberately not implemented: a generic patch field can only be built
// from a dictionary, because it needs the original dictionary entries to
// reproduce the unknown boundary condition on write.
template<class Type>
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF
)
:
    calculatedFvPatchField<Type>(p, iF)
{
    FatalErrorIn
    (
        "genericFvPatchField<Type>::genericFvPatchField"
        "(const fvPatch& p, const DimensionedField<Type, volMesh>& iF)"
    )   << "Not Implemented\n    "
        << "Trying to construct an genericFvPatchField on patch "
        << this->patch().name()
        << " of field " << this->dimensionedInternalField().name()
        << abort(FatalError);
}


// Construct from patch, internal field and dictionary.
// Parses every entry of the dictionary (other than "type" and "value")
// and caches any "nonuniform" or "uniform" field entries in the
// per-rank-type HashPtrTables so they can be mapped and re-written.
template<class Type>
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const dictionary& dict
)
:
    calculatedFvPatchField<Type>(p, iF, dict, false),
    actualTypeName_(dict.lookup("type")),
    dict_(dict)
{
    // The "value" entry is mandatory: it provides the patch values used
    // by the calculated base class when the actual BC is unavailable.
    if (!dict.found("value"))
    {
        FatalIOErrorIn
        (
            "genericFvPatchField<Type>::genericFvPatchField"
            "(const fvPatch&, const Field<Type>&, const dictionary&)",
            dict
        )   << "\n    Cannot find 'value' entry"
            << " on patch " << this->patch().name()
            << " of field " << this->dimensionedInternalField().name()
            << " in file " << this->dimensionedInternalField().objectPath()
            << nl
            << "    which is required to set the"
               " values of the generic patch field." << nl
            << "    (Actual type " << actualTypeName_ << ")" << nl
            << "\n    Please add the 'value' entry to the write function "
               "of the user-defined boundary-condition\n"
            << exit(FatalIOError);
    }

    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type" && iter().keyword() != "value")
        {
            if
            (
                iter().isStream()
             && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read first token
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        // A zero-size nonuniform list may be written as
                        // a bare label 0 rather than a compound token.
                        if
                        (
                            fieldToken.isLabel()
                         && fieldToken.labelToken() == 0
                        )
                        {
                            scalarFields_.insert
                            (
                                iter().keyword(),
                                new scalarField(0)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    token following 'nonuniform' "
                                   "is not a compound"
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<scalar> >::typeName
                    )
                    {
                        // Take over the compound's storage without copying
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        scalarFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<vector> >::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        vectorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<sphericalTensor> >::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor> >::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<tensor> >::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        tensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorIn
                        (
                            "genericFvPatchField<Type>::genericFvPatchField"
                            "(const fvPatch&, const Field<Type>&, "
                            "const dictionary&)",
                            dict
                        )   << "\n    compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n    on patch " << this->patch().name()
                            << " of field "
                            << this->dimensionedInternalField().name()
                            << " in file "
                            << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                    }
                }
                else if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "uniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isPunctuation())
                    {
                        // Single scalar value
                        scalarFields_.insert
                        (
                            iter().keyword(),
                            new scalarField
                            (
                                this->size(),
                                fieldToken.scalarToken()
                            )
                        );
                    }
                    else
                    {
                        // Read as scalarList.
                        is.putBack(fieldToken);

                        scalarList l(is);

                        // Deduce the tensor rank from the number of
                        // components in the bracketed list
                        if (l.size() == vector::nComponents)
                        {
                            vector vs(l[0], l[1], l[2]);

                            vectorFields_.insert
                            (
                                iter().keyword(),
                                new vectorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == sphericalTensor::nComponents)
                        {
                            sphericalTensor vs(l[0]);

                            sphericalTensorFields_.insert
                            (
                                iter().keyword(),
                                new sphericalTensorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == symmTensor::nComponents)
                        {
                            symmTensor vs(l[0], l[1], l[2], l[3], l[4], l[5]);

                            symmTensorFields_.insert
                            (
                                iter().keyword(),
                                new symmTensorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == tensor::nComponents)
                        {
                            tensor vs
                            (
                                l[0], l[1], l[2],
                                l[3], l[4], l[5],
                                l[6], l[7], l[8]
                            );

                            tensorFields_.insert
                            (
                                iter().keyword(),
                                new tensorField(this->size(), vs)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    unrecognised native type " << l
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                }
            }
        }
    }
}


// Construct by mapping the given generic patch field onto a new patch,
// mapping every cached field with the supplied mapper.
template<class Type>
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const genericFvPatchField<Type>& ptf,
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const fvPatchFieldMapper& mapper
)
:
    calculatedFvPatchField<Type>(ptf, p, iF, mapper),
    actualTypeName_(ptf.actualTypeName_),
    dict_(ptf.dict_)
{
    for
    (
        HashPtrTable<scalarField>::const_iterator iter =
            ptf.scalarFields_.begin();
        iter != ptf.scalarFields_.end();
        ++iter
    )
    {
        scalarFields_.insert(iter.key(), new scalarField(*iter(), mapper));
    }

    for
    (
        HashPtrTable<vectorField>::const_iterator iter =
            ptf.vectorFields_.begin();
        iter != ptf.vectorFields_.end();
        ++iter
    )
    {
        vectorFields_.insert(iter.key(), new vectorField(*iter(), mapper));
    }

    for
    (
        HashPtrTable<sphericalTensorField>::const_iterator iter =
            ptf.sphericalTensorFields_.begin();
        iter != ptf.sphericalTensorFields_.end();
        ++iter
    )
    {
        sphericalTensorFields_.insert
        (
            iter.key(),
            new sphericalTensorField(*iter(), mapper)
        );
    }

    for
    (
        HashPtrTable<symmTensorField>::const_iterator iter =
            ptf.symmTensorFields_.begin();
        iter != ptf.symmTensorFields_.end();
        ++iter
    )
    {
        symmTensorFields_.insert
        (
            iter.key(),
            new symmTensorField(*iter(), mapper)
        );
    }

    for
    (
        HashPtrTable<tensorField>::const_iterator iter =
            ptf.tensorFields_.begin();
        iter != ptf.tensorFields_.end();
        ++iter
    )
    {
        tensorFields_.insert(iter.key(), new tensorField(*iter(), mapper));
    }
}


// Copy construct
template<class Type>
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const genericFvPatchField<Type>& ptf
)
:
    calculatedFvPatchField<Type>(ptf),
    actualTypeName_(ptf.actualTypeName_),
    dict_(ptf.dict_),
    scalarFields_(ptf.scalarFields_),
    vectorFields_(ptf.vectorFields_),
    sphericalTensorFields_(ptf.sphericalTensorFields_),
    symmTensorFields_(ptf.symmTensorFields_),
    tensorFields_(ptf.tensorFields_)
{}


// Copy construct, resetting the internal field reference
template<class Type>
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const genericFvPatchField<Type>& ptf,
    const DimensionedField<Type, volMesh>& iF
)
:
    calculatedFvPatchField<Type>(ptf, iF),
    actualTypeName_(ptf.actualTypeName_),
    dict_(ptf.dict_),
    scalarFields_(ptf.scalarFields_),
    vectorFields_(ptf.vectorFields_),
    sphericalTensorFields_(ptf.sphericalTensorFields_),
    symmTensorFields_(ptf.symmTensorFields_),
    tensorFields_(ptf.tensorFields_)
{}


// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //

// Map (and resize as needed) from self given a mapping object,
// forwarding the mapping to every cached field.
template<class Type>
void Foam::genericFvPatchField<Type>::autoMap
(
    const fvPatchFieldMapper& m
)
{
    calculatedFvPatchField<Type>::autoMap(m);

    for
    (
        HashPtrTable<scalarField>::iterator iter = scalarFields_.begin();
        iter != scalarFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<vectorField>::iterator iter = vectorFields_.begin();
        iter != vectorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<sphericalTensorField>::iterator iter =
            sphericalTensorFields_.begin();
        iter != sphericalTensorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<symmTensorField>::iterator iter =
            symmTensorFields_.begin();
        iter != symmTensorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<tensorField>::iterator iter = tensorFields_.begin();
        iter != tensorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }
}
template void Foam::genericFvPatchField::rmap ( const fvPatchField& ptf, const labelList& addr ) { calculatedFvPatchField::rmap(ptf, addr); const genericFvPatchField& dptf = refCast >(ptf); for ( HashPtrTable::iterator iter = scalarFields_.begin(); iter != scalarFields_.end(); ++iter ) { HashPtrTable::const_iterator dptfIter = dptf.scalarFields_.find(iter.key()); if (dptfIter != dptf.scalarFields_.end()) { iter()->rmap(*dptfIter(), addr); } } for ( HashPtrTable::iterator iter = vectorFields_.begin(); iter != vectorFields_.end(); ++iter ) { HashPtrTable::const_iterator dptfIter = dptf.vectorFields_.find(iter.key()); if (dptfIter != dptf.vectorFields_.end()) { iter()->rmap(*dptfIter(), addr); } } for ( HashPtrTable::iterator iter = sphericalTensorFields_.begin(); iter != sphericalTensorFields_.end(); ++iter ) { HashPtrTable::const_iterator dptfIter = dptf.sphericalTensorFields_.find(iter.key()); if (dptfIter != dptf.sphericalTensorFields_.end()) { iter()->rmap(*dptfIter(), addr); } } for ( HashPtrTable::iterator iter = symmTensorFields_.begin(); iter != symmTensorFields_.end(); ++iter ) { HashPtrTable::const_iterator dptfIter = dptf.symmTensorFields_.find(iter.key()); if (dptfIter != dptf.symmTensorFields_.end()) { iter()->rmap(*dptfIter(), addr); } } for ( HashPtrTable::iterator iter = tensorFields_.begin(); iter != tensorFields_.end(); ++iter ) { HashPtrTable::const_iterator dptfIter = dptf.tensorFields_.find(iter.key()); if (dptfIter != dptf.tensorFields_.end()) { iter()->rmap(*dptfIter(), addr); } } } template Foam::tmp > Foam::genericFvPatchField::valueInternalCoeffs ( const tmp& ) const { FatalErrorIn ( "genericFvPatchField::" "valueInternalCoeffs(const tmp&) const" ) << "\n " "valueInternalCoeffs cannot be called for a genericFvPatchField" " (actual type " << actualTypeName_ << ")" << "\n on patch " << this->patch().name() << " of field " << this->dimensionedInternalField().name() << " in file " << this->dimensionedInternalField().objectPath() << "\n You are 
probably trying to solve for a field with a " "generic boundary condition." << exit(FatalError); return *this; } template Foam::tmp > Foam::genericFvPatchField::valueBoundaryCoeffs ( const tmp& ) const { FatalErrorIn ( "genericFvPatchField::" "valueBoundaryCoeffs(const tmp&) const" ) << "\n " "valueBoundaryCoeffs cannot be called for a genericFvPatchField" " (actual type " << actualTypeName_ << ")" << "\n on patch " << this->patch().name() << " of field " << this->dimensionedInternalField().name() << " in file " << this->dimensionedInternalField().objectPath() << "\n You are probably trying to solve for a field with a " "generic boundary condition." << exit(FatalError); return *this; } template Foam::tmp > Foam::genericFvPatchField::gradientInternalCoeffs() const { FatalErrorIn ( "genericFvPatchField::" "gradientInternalCoeffs() const" ) << "\n " "gradientInternalCoeffs cannot be called for a genericFvPatchField" " (actual type " << actualTypeName_ << ")" << "\n on patch " << this->patch().name() << " of field " << this->dimensionedInternalField().name() << " in file " << this->dimensionedInternalField().objectPath() << "\n You are probably trying to solve for a field with a " "generic boundary condition." << exit(FatalError); return *this; } template Foam::tmp > Foam::genericFvPatchField::gradientBoundaryCoeffs() const { FatalErrorIn ( "genericFvPatchField::" "gradientBoundaryCoeffs() const" ) << "\n " "gradientBoundaryCoeffs cannot be called for a genericFvPatchField" " (actual type " << actualTypeName_ << ")" << "\n on patch " << this->patch().name() << " of field " << this->dimensionedInternalField().name() << " in file " << this->dimensionedInternalField().objectPath() << "\n You are probably trying to solve for a field with a " "generic boundary condition." 
<< exit(FatalError); return *this; } template void Foam::genericFvPatchField::write(Ostream& os) const { os.writeKeyword("type") << actualTypeName_ << token::END_STATEMENT << nl; for ( dictionary::const_iterator iter = dict_.begin(); iter != dict_.end(); ++iter ) { if (iter().keyword() != "type" && iter().keyword() != "value") { if ( iter().isStream() && iter().stream().size() && iter().stream()[0].isWord() && iter().stream()[0].wordToken() == "nonuniform" ) { if (scalarFields_.found(iter().keyword())) { scalarFields_.find(iter().keyword())() ->writeEntry(iter().keyword(), os); } else if (vectorFields_.found(iter().keyword())) { vectorFields_.find(iter().keyword())() ->writeEntry(iter().keyword(), os); } else if (sphericalTensorFields_.found(iter().keyword())) { sphericalTensorFields_.find(iter().keyword())() ->writeEntry(iter().keyword(), os); } else if (symmTensorFields_.found(iter().keyword())) { symmTensorFields_.find(iter().keyword())() ->writeEntry(iter().keyword(), os); } else if (tensorFields_.found(iter().keyword())) { tensorFields_.find(iter().keyword())() ->writeEntry(iter().keyword(), os); } } else { iter().write(os); } } } this->writeEntry("value", os); } // ************************************************************************* //