#include "genericPointPatchField.H"
#include <OpenFOAM/pointPatchFieldMapper.H>

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

namespace Foam
{

// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * //
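// Construct from patch and internal field: not implemented for the generic
// type, which can only be created from a dictionary or by copy/mapping.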
template<class Type>
genericPointPatchField<Type>::genericPointPatchField
(
    const pointPatch& p,
    const DimensionedField<Type, pointMesh>& iF
)
:
    calculatedPointPatchField<Type>(p, iF)
{
    notImplemented
    (
        "genericPointPatchField<Type>::genericPointPatchField"
        "(const pointPatch& p, const DimensionedField<Type, pointMesh>& iF)"
    );
}

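// Construct from patch, internal field and dictionary.  Every entry other
// than "type" that starts with the word "nonuniform" is read into the
// matching typed field table (scalar, vector, sphericalTensor, symmTensor
// or tensor) so that it can be mapped and re-written later.  A typical
// entry in the boundary-field dictionary might look like (illustrative
// values only):
//
//     value           nonuniform List<vector> 2((0 0 0) (1 0 0));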
template<class Type>
genericPointPatchField<Type>::genericPointPatchField
(
    const pointPatch& p,
    const DimensionedField<Type, pointMesh>& iF,
    const dictionary& dict
)
:
    calculatedPointPatchField<Type>(p, iF, dict),
    actualTypeName_(dict.lookup("type")),
    dict_(dict)
{
    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type")
        {
            if
            (
                iter().isStream()
                && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read the first token of the entry
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                    && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        if
                        (
                            fieldToken.isLabel()
                            && fieldToken.labelToken() == 0
                        )
                        {
                            // Zero-sized entry: store an empty placeholder
                            scalarFields_.insert
                            (
                                iter().keyword(),
                                new scalarField(0)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericPointPatchField<Type>::"
                                "genericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n token following 'nonuniform' "
                                   "is not a compound"
                                << "\n on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                        == token::Compound<List<scalar> >::typeName
                    )
                    {
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericPointPatchField<Type>::"
                                "genericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        scalarFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                        == token::Compound<List<vector> >::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericPointPatchField<Type>::"
                                "genericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        vectorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                        == token::Compound<List<sphericalTensor> >::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericPointPatchField<Type>::"
                                "genericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                        == token::Compound<List<symmTensor> >::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericPointPatchField<Type>::"
                                "genericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                        == token::Compound<List<tensor> >::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericPointPatchField<Type>::"
                                "genericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        tensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorIn
                        (
                            "genericPointPatchField<Type>::"
                            "genericPointPatchField"
                            "(const pointPatch&, const Field<Type>&, "
                            "const dictionary&)",
                            dict
                        )   << "\n compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n on patch " << this->patch().name()
                            << " of field "
                            << this->dimensionedInternalField().name()
                            << " in file "
                            << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                    }
                }
            }
        }
    }
}

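// Construct by mapping the given genericPointPatchField onto a new patch:
// the base field is mapped by calculatedPointPatchField and every stored
// generic field is mapped with the same pointPatchFieldMapper.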
template<class Type>
genericPointPatchField<Type>::genericPointPatchField
(
    const genericPointPatchField<Type>& ptf,
    const pointPatch& p,
    const DimensionedField<Type, pointMesh>& iF,
    const pointPatchFieldMapper& mapper
)
:
    calculatedPointPatchField<Type>(ptf, p, iF, mapper),
    actualTypeName_(ptf.actualTypeName_),
    dict_(ptf.dict_)
{
    for
    (
        HashPtrTable<scalarField>::const_iterator iter =
            ptf.scalarFields_.begin();
        iter != ptf.scalarFields_.end();
        ++iter
    )
    {
        scalarFields_.insert(iter.key(), new scalarField(*iter(), mapper));
    }

    for
    (
        HashPtrTable<vectorField>::const_iterator iter =
            ptf.vectorFields_.begin();
        iter != ptf.vectorFields_.end();
        ++iter
    )
    {
        vectorFields_.insert(iter.key(), new vectorField(*iter(), mapper));
    }

    for
    (
        HashPtrTable<sphericalTensorField>::const_iterator iter =
            ptf.sphericalTensorFields_.begin();
        iter != ptf.sphericalTensorFields_.end();
        ++iter
    )
    {
        sphericalTensorFields_.insert
        (
            iter.key(),
            new sphericalTensorField(*iter(), mapper)
        );
    }

    for
    (
        HashPtrTable<symmTensorField>::const_iterator iter =
            ptf.symmTensorFields_.begin();
        iter != ptf.symmTensorFields_.end();
        ++iter
    )
    {
        symmTensorFields_.insert
        (
            iter.key(),
            new symmTensorField(*iter(), mapper)
        );
    }

    for
    (
        HashPtrTable<tensorField>::const_iterator iter =
            ptf.tensorFields_.begin();
        iter != ptf.tensorFields_.end();
        ++iter
    )
    {
        tensorFields_.insert(iter.key(), new tensorField(*iter(), mapper));
    }
}

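// Construct as copy setting the internal field reference; the dictionary
// and all stored generic field tables are copied from the donor field.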
template<class Type>
genericPointPatchField<Type>::genericPointPatchField
(
    const genericPointPatchField<Type>& ptf,
    const DimensionedField<Type, pointMesh>& iF
)
:
    calculatedPointPatchField<Type>(ptf, iF),
    actualTypeName_(ptf.actualTypeName_),
    dict_(ptf.dict_),
    scalarFields_(ptf.scalarFields_),
    vectorFields_(ptf.vectorFields_),
    sphericalTensorFields_(ptf.sphericalTensorFields_),
    symmTensorFields_(ptf.symmTensorFields_),
    tensorFields_(ptf.tensorFields_)
{}

// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * //

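// autoMap: resize/redistribute every stored generic field following a mesh
// change, using the mapper supplied to the field's autoMap call.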
template<class Type>
void genericPointPatchField<Type>::autoMap
(
    const pointPatchFieldMapper& m
)
{
    for
    (
        HashPtrTable<scalarField>::iterator iter = scalarFields_.begin();
        iter != scalarFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<vectorField>::iterator iter = vectorFields_.begin();
        iter != vectorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<sphericalTensorField>::iterator iter =
            sphericalTensorFields_.begin();
        iter != sphericalTensorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<symmTensorField>::iterator iter =
            symmTensorFields_.begin();
        iter != symmTensorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }

    for
    (
        HashPtrTable<tensorField>::iterator iter = tensorFields_.begin();
        iter != tensorFields_.end();
        ++iter
    )
    {
        iter()->autoMap(m);
    }
}

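// rmap: reverse-map the corresponding fields of another generic patch field
// onto this one at the given addressing; entries without a counterpart in
// the donor field are left untouched.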
template<class Type>
void genericPointPatchField<Type>::rmap
(
    const pointPatchField<Type>& ptf,
    const labelList& addr
)
{
    const genericPointPatchField<Type>& dptf =
        refCast<const genericPointPatchField<Type> >(ptf);

    for
    (
        HashPtrTable<scalarField>::iterator iter = scalarFields_.begin();
        iter != scalarFields_.end();
        ++iter
    )
    {
        HashPtrTable<scalarField>::const_iterator dptfIter =
            dptf.scalarFields_.find(iter.key());

        if (dptfIter != dptf.scalarFields_.end())
        {
            iter()->rmap(*dptfIter(), addr);
        }
    }

    for
    (
        HashPtrTable<vectorField>::iterator iter = vectorFields_.begin();
        iter != vectorFields_.end();
        ++iter
    )
    {
        HashPtrTable<vectorField>::const_iterator dptfIter =
            dptf.vectorFields_.find(iter.key());

        if (dptfIter != dptf.vectorFields_.end())
        {
            iter()->rmap(*dptfIter(), addr);
        }
    }

    for
    (
        HashPtrTable<sphericalTensorField>::iterator iter =
            sphericalTensorFields_.begin();
        iter != sphericalTensorFields_.end();
        ++iter
    )
    {
        HashPtrTable<sphericalTensorField>::const_iterator dptfIter =
            dptf.sphericalTensorFields_.find(iter.key());

        if (dptfIter != dptf.sphericalTensorFields_.end())
        {
            iter()->rmap(*dptfIter(), addr);
        }
    }

    for
    (
        HashPtrTable<symmTensorField>::iterator iter =
            symmTensorFields_.begin();
        iter != symmTensorFields_.end();
        ++iter
    )
    {
        HashPtrTable<symmTensorField>::const_iterator dptfIter =
            dptf.symmTensorFields_.find(iter.key());

        if (dptfIter != dptf.symmTensorFields_.end())
        {
            iter()->rmap(*dptfIter(), addr);
        }
    }

    for
    (
        HashPtrTable<tensorField>::iterator iter = tensorFields_.begin();
        iter != tensorFields_.end();
        ++iter
    )
    {
        HashPtrTable<tensorField>::const_iterator dptfIter =
            dptf.tensorFields_.find(iter.key());

        if (dptfIter != dptf.tensorFields_.end())
        {
            iter()->rmap(*dptfIter(), addr);
        }
    }
}

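// write: output the actual type name followed by the original dictionary
// entries, with any "nonuniform" entries re-written from the stored fields.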
template<class Type>
void genericPointPatchField<Type>::write(Ostream& os) const
{
    os.writeKeyword("type") << actualTypeName_ << token::END_STATEMENT << nl;

    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type")
        {
            if
            (
                iter().isStream()
                && iter().stream().size()
                && iter().stream()[0].isWord()
                && iter().stream()[0].wordToken() == "nonuniform"
            )
            {
                if (scalarFields_.found(iter().keyword()))
                {
                    scalarFields_.find(iter().keyword())()
                        ->writeEntry(iter().keyword(), os);
                }
                else if (vectorFields_.found(iter().keyword()))
                {
                    vectorFields_.find(iter().keyword())()
                        ->writeEntry(iter().keyword(), os);
                }
                else if (sphericalTensorFields_.found(iter().keyword()))
                {
                    sphericalTensorFields_.find(iter().keyword())()
                        ->writeEntry(iter().keyword(), os);
                }
                else if (symmTensorFields_.found(iter().keyword()))
                {
                    symmTensorFields_.find(iter().keyword())()
                        ->writeEntry(iter().keyword(), os);
                }
                else if (tensorFields_.found(iter().keyword()))
                {
                    tensorFields_.find(iter().keyword())()
                        ->writeEntry(iter().keyword(), os);
                }
            }
            else
            {
                iter().write(os);
            }
        }
    }
}

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

} // End namespace Foam

// ************************************************************************* //