OOFEM  2.4
OOFEM.org - Object Oriented Finite Element Solver
nonlocalmatwtp.C
Go to the documentation of this file.
1 /*
2  *
3  * ##### ##### ###### ###### ### ###
4  * ## ## ## ## ## ## ## ### ##
5  * ## ## ## ## #### #### ## # ##
6  * ## ## ## ## ## ## ## ##
7  * ## ## ## ## ## ## ## ##
8  * ##### ##### ## ###### ## ##
9  *
10  *
11  * OOFEM : Object Oriented Finite Element Code
12  *
13  * Copyright (C) 1993 - 2013 Borek Patzak
14  *
15  *
16  *
17  * Czech Technical University, Faculty of Civil Engineering,
18  * Department of Structural Mechanics, 166 29 Prague, Czech Republic
19  *
20  * This library is free software; you can redistribute it and/or
21  * modify it under the terms of the GNU Lesser General Public
22  * License as published by the Free Software Foundation; either
23  * version 2.1 of the License, or (at your option) any later version.
24  *
25  * This program is distributed in the hope that it will be useful,
26  * but WITHOUT ANY WARRANTY; without even the implied warranty of
27  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
28  * Lesser General Public License for more details.
29  *
30  * You should have received a copy of the GNU Lesser General Public
31  * License along with this library; if not, write to the Free Software
32  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
33  */
34 
#include "nonlocalmatwtp.h"
#include "nonlocalmaterialext.h"
#include "element.h"
#include "dofmanager.h"
#include "engngm.h"
#include "gausspoint.h"
#include "material.h"
#include "communicator.h"
#include "datastream.h"
#include "domaintransactionmanager.h"
#include "classfactory.h"

#include <set>
48 
49 namespace oofem {
50 #define NonlocalMaterialWTP_DEBUG_PRINT 0
51 
52 /*
53  * Returns array storing nonlocal dependency
54  * (in terms of global element numbers) for given element
55  */
56 void
58 {
59  std :: set< int >relems;
60  std :: set< int > :: const_iterator relemsIter;
61  Element *ielem = d->giveElement(num);
62 
64  relems.clear();
65  // loop over element IRules and their IPs to retrieve remote (nonlocal) elements
66  // store their global numbers in the relems set (to avoid redundancy)
67  // and then keep them in nonlocTables array.
69 
70  // convert relems set into an int array
71  // and store it
72  answer.resize( relems.size() );
73  int _i = 1;
74  for ( int relem: relems ) {
75  answer.at(_i++) = relem;
76  }
77  } else {
78  answer.clear();
79  }
80 }
81 
82 
83 void
85 {
86  int remoteElemNum;
87 
89  static_cast< NonlocalMaterialStatusExtensionInterface * >( gp->giveMaterialStatus()->
91  if ( interface ) {
92  auto lir = interface->giveIntegrationDomainList();
93 
94  for ( auto &intdom: *lir ) {
95  remoteElemNum = ( intdom.nearGp )->giveElement()->giveGlobalNumber();
96  s.insert(remoteElemNum);
97  }
98  }
99 }
100 
101 
102 /*
103  * prepares the communication maps for remote elements
104  * should be called immediately after load balancing,
105  * before any work transfer.
106  *
107  */
108 void
110 {
111  int ie, gie, nelem = domain->giveNumberOfElements();
112  EngngModel *emodel = domain->giveEngngModel();
113  Element *elem;
114  int nproc = emodel->giveNumberOfProcesses();
115  int myrank = emodel->giveRank();
116  CommunicatorBuff cb(nproc, CBT_dynamic);
117  Communicator com(emodel, &cb, myrank, nproc, CommMode_Dynamic);
118  this->nonlocElementDependencyMap.clear();
119 
120  // build nonlocal element dependency array for each element
121  for ( ie = 1; ie <= nelem; ie++ ) {
122  elem = domain->giveElement(ie);
123  if ( ( elem->giveParallelMode() == Element_local ) ) {
124  gie = elem->giveGlobalNumber();
125  this->giveElementNonlocalDepArry(nonlocElementDependencyMap [ gie ], domain, ie);
126  }
127  }
128 
129  /* send and receive nonlocElementDependencyArry of migrating elements to remote partition */
133  com.finishExchange();
134 }
135 
136 
137 
138 /*
139  * should be called after basic local migration is finalized,
140  * when all local elements are already available
141  */
142 void
144 {
145  Domain *domain = this->lb->giveDomain();
146  EngngModel *emodel = domain->giveEngngModel();
147  int nproc = emodel->giveNumberOfProcesses();
148  int myrank = emodel->giveRank();
149  CommunicatorBuff cb(nproc, CBT_dynamic);
150  Communicator com(emodel, &cb, myrank, nproc, CommMode_Dynamic);
151  StaticCommunicationBuffer commBuff(MPI_COMM_WORLD);
152 
153  /*
154  * build domain nonlocal element dependency list. Then exclude local elements - what remains are unsatisfied
155  * remote dependencies that have to be broadcasted and received from partitions owning relevant elements
156  */
157  int _locsize, i, _i, ie, _size, _globnum, result, nelems = domain->giveNumberOfElements();
158  int _globsize, _val;
159  Element *elem;
160  std :: set< int >domainElementDepSet;
161  // loop over each element dep list to assemble domain list
162  for ( ie = 1; ie <= nelems; ie++ ) {
163  elem = domain->giveElement(ie);
164  if ( ( elem->giveParallelMode() == Element_local ) ) {
165  _globnum = elem->giveGlobalNumber();
166  IntArray &iedep = nonlocElementDependencyMap [ _globnum ];
167  _size = iedep.giveSize();
168  for ( _i = 1; _i <= _size; _i++ ) {
169  domainElementDepSet.insert( iedep.at(_i) );
170  }
171 
172 #if NonlocalMaterialWTP_DEBUG_PRINT
173  fprintf(stderr, "[%d] element %d dependency:", myrank, _globnum);
174  for ( _i = 1; _i <= _size; _i++ ) {
175  fprintf( stderr, "%d ", iedep.at(_i) );
176  }
177 
178  fprintf(stderr, "\n");
179 #endif
180  }
181  }
182 
183 #if NonlocalMaterialWTP_DEBUG_PRINT
184  fprintf(stderr, "[%d] nonlocal domain dependency:", myrank);
185  for ( int eldep: domainElementDepSet ) {
186  fprintf(stderr, "%d ", eldep);
187  }
188 
189  fprintf(stderr, "\n");
190 #endif
191 
192  // now exclude local elements (local dependency is always satisfied)
193  for ( _i = 1; _i <= nelems; _i++ ) {
194  elem = domain->giveElement(_i);
195  if ( elem->giveParallelMode() == Element_local ) {
196  domainElementDepSet.erase( elem->giveGlobalNumber() );
197  }
198  }
199 
200 #if NonlocalMaterialWTP_DEBUG_PRINT
201  fprintf(stderr, "[%d] remote elem wish list:", myrank);
202  for ( int eldep: domainElementDepSet ) {
203  fprintf(stderr, "%d ", eldep);
204  }
205 
206  fprintf(stderr, "\n");
207 #endif
208 
209  // broadcast remaining elements (unsatisfied domain nonlocal dependency) to remaining partitions
210  _locsize = domainElementDepSet.size() + 1;
211  result = MPI_Allreduce(& _locsize, & _globsize, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD);
212  if ( result != MPI_SUCCESS ) {
213  OOFEM_ERROR("MPI_Allreduce to determine broadcast buffer size failed");
214  }
215 
216  commBuff.resize( commBuff.givePackSizeOfInt(_globsize) );
217  // remote domain wish list
218  std :: set< int >remoteWishSet;
219 
220  toSendList.resize(nproc);
221  for ( i = 0; i < nproc; i++ ) { // loop over partitions
222  commBuff.init();
223  toSendList [ i ].clear();
224  if ( i == myrank ) {
225  // current domain has to send its receive wish list to all domains
226  commBuff.write(_locsize);
227  for ( int eldep: domainElementDepSet ) {
228  commBuff.write(eldep);
229  }
230 
231  result = commBuff.bcast(i);
232  } else {
233  // unpack remote domain wish list
234  remoteWishSet.clear();
235  result = commBuff.bcast(i);
236  // unpack size
237  commBuff.read(_size);
238  for ( _i = 1; _i < _size; _i++ ) {
239  commBuff.read(_val);
240  remoteWishSet.insert(_val);
241  }
242 
243  // determine which local elements are to be sent to remotepartition
244  for ( _i = 1; _i <= nelems; _i++ ) {
245  elem = domain->giveElement(_i);
246  if ( elem->giveParallelMode() == Element_local ) {
247  if ( remoteWishSet.find( elem->giveGlobalNumber() ) != remoteWishSet.end() ) {
248  // store local element number
249  toSendList [ i ].push_back(_i);
250  }
251  }
252  }
253  }
254  } // end loop over partitions broadcast
255 
256 #if NonlocalMaterialWTP_DEBUG_PRINT
257  for ( i = 0; i < nproc; i++ ) { // loop over partitions
258  // print some info
259  fprintf(stderr, "[%d] elements scheduled for mirroring at [%d]:",
260  myrank, i);
261  for ( int elnum: toSendList [ i ] ) {
262  fprintf( stderr, "%d[%d] ", elnum, domain->giveElement(elnum)->giveGlobalNumber() );
263  }
264 
265  fprintf(stderr, "\n");
266  }
267 
268 #endif
269 
270 
271 
272 
273 
277  com.finishExchange();
278 
279  domain->commitTransactions( domain->giveTransactionManager() );
280 
281 #ifdef __VERBOSE_PARALLEL
282  VERBOSEPARALLEL_PRINT("NonlocalMaterialWTP::migrate", "Finished migrating remote elements", myrank);
283 #endif
284 }
285 
286 
287 void
289 {
290  /* Now the question is how to use nonlocElementDependencyMap, which is available for
291  * each element, to fastly reinitialize nonlocal integration tables.
292  *
293  * if not needed, should be deleted at the end of migrate method, to free memory
294  */
296  // delete element dep arrays
298 }
299 
300 
302 {
303  int myrank = d->giveEngngModel()->giveRank();
304  int iproc = pc.giveRank();
305 
306  if ( iproc == myrank ) {
307  return 1; // skip local partition
308  }
309 
310  // query process communicator to use
312 
313  int ielem, nelem = d->giveNumberOfElements();
314  int _globnum;
315  Element *elem;
316 
317  for ( ielem = 1; ielem <= nelem; ielem++ ) { // begin loop over elements
318  elem = d->giveElement(ielem);
319  if ( ( elem->giveParallelMode() == Element_local ) &&
320  ( lb->giveElementPartition(ielem) == iproc ) ) {
321  // pack local element (node numbers shuld be global ones!!!)
322  // pack type
323  _globnum = elem->giveGlobalNumber();
324  pcbuff->write(_globnum);
325  nonlocElementDependencyMap [ _globnum ].storeYourself(*pcbuff);
326  }
327  } // end loop over elements
328 
329  // pack end-of-element-record
331 
332  return 1;
333 }
334 
336 {
337  int myrank = d->giveEngngModel()->giveRank();
338  int iproc = pc.giveRank();
339  int _globnum;
340 
341  if ( iproc == myrank ) {
342  return 1; // skip local partition
343  }
344 
345  // query process communicator to use
347 
348  // unpack element data
349  do {
350  pcbuff->read(_globnum);
351  if ( _globnum == NonlocalMaterialWTP_END_DATA ) {
352  break;
353  }
354 
355  nonlocElementDependencyMap [ _globnum ].restoreYourself(*pcbuff);
356  } while ( 1 );
357 
358  return 1;
359 }
360 
362 {
363  int myrank = d->giveEngngModel()->giveRank();
364  int iproc = pc.giveRank();
365  int nnodes, inode;
366  DofManager *node, *dofman;
367  Element *elem;
368 
369  if ( iproc == myrank ) {
370  return 1; // skip local partition
371  }
372 
373  // query process communicator to use
375 
376  // here we have to pack also nodes that are shared by packed elements !!!
377  // assemble set of nodes needed by those elements
378  // these have to be send (except those that are shared)
379  std :: set< int >nodesToSend;
380  for ( int ie: toSendList [ iproc ] ) {
381  //ie = d->elementGlobal2Local(gie);
382  elem = d->giveElement(ie);
383  nnodes = elem->giveNumberOfDofManagers();
384  for ( int i = 1; i <= nnodes; i++ ) {
385  node = elem->giveDofManager(i);
386  if ( ( node->giveParallelMode() == DofManager_local ) ||
387  ( node->isShared() && !node->givePartitionList()->contains(iproc) ) ) {
388  nodesToSend.insert( node->giveGlobalNumber() );
389  }
390  }
391  }
392 
393  // pack nodes that become null nodes on remote partition
394  for ( int in: nodesToSend ) {
395  inode = d->dofmanGlobal2Local(in);
396  dofman = d->giveDofManager(inode);
397  pcbuff->write( dofman->giveInputRecordName() );
399  }
400 
401  pcbuff->write("");
402 
403  for ( int ie: toSendList [ iproc ] ) {
404  //ie = d->elementGlobal2Local(gie);
405  elem = d->giveElement(ie);
406  // pack local element (node numbers shuld be global ones!!!)
407  // pack type
408  pcbuff->write( elem->giveInputRecordName() );
410  }
411 
412  pcbuff->write("");
413 
414 
415  return 1;
416 }
417 
419 {
420  int myrank = d->giveEngngModel()->giveRank();
421  int iproc = pc.giveRank();
422  std :: string _type;
423  DofManager *dofman;
424  IntArray _partitions;
425 
426  if ( iproc == myrank ) {
427  return 1; // skip local partition
428  }
429 
430  // query process communicator to use
432 
433  // unpack dofman data
434  do {
435  pcbuff->read(_type);
436  if ( _type.size() == 0 ) {
437  break;
438  }
439  dofman = classFactory.createDofManager(_type.c_str(), 0, d);
442  if ( d->dofmanGlobal2Local( dofman->giveGlobalNumber() ) ) {
443  // record already exist
444  delete dofman;
445  } else {
447  dofman->giveGlobalNumber(),
448  dofman);
449  }
450  } while ( 1 );
451 
452 
453  // unpack element data
454  Element *elem;
455  _partitions.resize(1);
456  _partitions.at(1) = iproc;
457  do {
458  pcbuff->read(_type);
459  if ( _type.size() == 0 ) {
460  break;
461  }
462 
463  elem = classFactory.createElement(_type.c_str(), 0, d);
464  elem->restoreContext(*pcbuff, CM_Definition | CM_State);
466  elem->setPartitionList(_partitions);
468  elem->giveGlobalNumber(), elem);
469  } while ( 1 );
470 
471  return 1;
472 }
473 
474 /* Now the question is how to use nonlocElementDependencyMap, which is available for
475  * each element, to quickly reinitialize nonlocal integration tables.
476  *
477  * if not needed, should be deleted at the end of migrate method, to free memory
478  *
479  * first the existing data should be cleared, and new ones initialized
480  * profiting from nonlocElementDependencyMap, that is available for all
481  * local elements.
482  */
483 
484 void
486 {
487  Domain *d = lb->giveDomain();
488  int n, i, globnum, ie, nelem = d->giveNumberOfElements();
489  IntArray localElementDep;
490  Element *elem;
491 
492  // build nonlocal element dependency array for each element
493  for ( ie = 1; ie <= nelem; ie++ ) {
494  elem = d->giveElement(ie);
495  if ( ( elem->giveParallelMode() == Element_local ) ) {
496  IntArray localMap;
497  // translate here nonlocElementDependencyMap[_globnum] to corresponding local numbers
498  globnum = elem->giveGlobalNumber();
499  n = nonlocElementDependencyMap [ globnum ].giveSize();
500  localElementDep.resize(n);
501  for ( i = 1; i <= n; i++ ) {
502  localElementDep.at(i) = d->elementGlobal2Local( nonlocElementDependencyMap [ globnum ].at(i) );
503  }
504 
506  }
507  }
508 }
509 
510 
511 
512 void
514 {
515  Element *elem = gp->giveElement();
517  if ( iface ) {
518  iface->rebuildNonlocalPointTable(gp, & map);
519  }
520 }
521 } // end namespace oofem
bool contains(int value) const
Definition: intarray.h:283
int addDofManTransaction(DomainTransactionType, int, DofManager *)
int initExchange(int tag)
Initializes data exchange with all problems.
Definition: communicator.C:104
DofManager in active domain is shared only by remote elements (these are only introduced for nonlocal...
Definition: dofmanager.h:88
Class and object Domain.
Definition: domain.h:115
int addElementTransaction(DomainTransactionType, int, Element *)
Abstract base class for all nonlocal materials.
int giveGlobalNumber() const
Definition: dofmanager.h:501
int packAllData(T *ptr, int(T::*packFunc)(ProcessCommunicator &))
Pack all problemCommunicators data to their send buffers.
Definition: communicator.h:223
void init(Domain *d)
Initializes receiver; should be called before any work transfer.
int giveGlobalNumber() const
Definition: element.h:1059
#define CM_State
Definition: contextmode.h:46
int commitTransactions(DomainTransactionManager *tm)
Commits transactions recorded in transaction manager.
Definition: domain.C:1619
virtual const char * giveInputRecordName() const =0
const IntArray * givePartitionList()
Returns partition list of receiver.
Definition: dofmanager.h:519
EngngModel * giveEngngModel()
Returns engineering model to which receiver is associated.
Definition: domain.C:433
int elementGlobal2Local(int _globnum)
Definition: domain.C:1876
Abstract base class for all finite elements.
Definition: element.h:145
DofManager * createDofManager(const char *name, int num, Domain *domain)
Creates new instance of Dof manager corresponding to given keyword.
Definition: classfactory.C:169
Base class for dof managers.
Definition: dofmanager.h:113
int giveNumberOfProcesses() const
Returns the number of collaborating processes.
Definition: engngm.h:1060
Element * giveElement()
Returns corresponding element to receiver.
Definition: gausspoint.h:188
Abstract base class for all nonlocal constitutive model statuses.
std::vector< localIntegrationRecord > * giveIntegrationDomainList()
Returns integration list of receiver.
int giveNumberOfElements() const
Returns number of elements in domain.
Definition: domain.h:434
The ProcessCommunicator and corresponding buffers (represented by this class) are separated in order ...
Definition: processcomm.h:64
int giveRank()
Returns corresponding rank of associated partition.
Definition: processcomm.h:207
std::map< int, IntArray > nonlocElementDependencyMap
Class implementing an array of integers.
Definition: intarray.h:61
int & at(int i)
Coefficient access function.
Definition: intarray.h:103
virtual int giveNumberOfDofManagers() const
Definition: element.h:656
void setParallelMode(dofManagerParallelMode _mode)
Sets parallel mode of receiver.
Definition: dofmanager.h:514
int finishExchange()
Finishes the exchange.
Definition: communicator.C:115
void setParallelMode(elementParallelMode _mode)
Sets parallel mode of element.
Definition: element.h:1071
virtual contextIOResultType restoreContext(DataStream &stream, ContextMode mode, void *obj=NULL)
Restores the receiver state previously written in stream.
Definition: element.C:970
#define MIGRATE_REMOTE_ELEMENTS_TAG
int unpackMigratingElementDependencies(Domain *d, ProcessCommunicator &pc)
#define MIGRATE_NONLOCALDEP_TAG
int unpackRemoteElements(Domain *d, ProcessCommunicator &pc)
DomainTransactionManager * giveTransactionManager()
Returns domain transaction manager.
Definition: domain.C:1606
Element * giveElement(int n)
Service for accessing particular domain fe element.
Definition: domain.C:160
virtual int write(const int *data, int count)
Writes count integer values from array pointed by data.
Definition: processcomm.h:83
virtual int givePackSizeOfInt(int count)
Definition: combuff.C:271
#define OOFEM_ERROR(...)
Definition: error.h:61
virtual int resize(int newSize)
Resizes buffer to given size.
Definition: combuff.h:314
void clear()
Clears the array (zero size).
Definition: intarray.h:177
ProcessCommunicatorBuff * giveProcessCommunicatorBuff()
Returns communication buffer.
Definition: processcomm.h:210
#define VERBOSEPARALLEL_PRINT(service, str, rank)
Definition: parallel.h:50
virtual void init()
Initializes buffer to empty state.
Definition: combuff.h:316
Class representing process communicator for engineering model.
Definition: processcomm.h:176
int packRemoteElements(Domain *d, ProcessCommunicator &pc)
virtual int read(int *dest, int n)
Reads count integer values into array pointed by data.
Definition: combuff.h:333
void resize(int n)
Checks size of receiver towards requested bounds.
Definition: intarray.C:124
(Dynamic) In this case the communication pattern and the amount of data sent between nodes is not kno...
int unpackAllData(T *ptr, int(T::*unpackFunc)(ProcessCommunicator &))
Unpack all problemCommuncators data from recv buffers.
Definition: communicator.h:262
std::vector< std::list< int > > toSendList
virtual contextIOResultType saveContext(DataStream &stream, ContextMode mode, void *obj=NULL)
Stores receiver state to output stream.
Definition: dofmanager.C:543
elementParallelMode giveParallelMode() const
Return elementParallelMode of receiver.
Definition: element.h:1069
int dofmanGlobal2Local(int _globnum)
Definition: domain.C:1864
void rebuildNonlocalPointTable(GaussPoint *gp, IntArray *contributingElems)
Rebuild list of integration points which take part in nonlocal average in given integration point...
void giveElementNonlocalDepArry(IntArray &answer, Domain *d, int num)
Element is local, there are no contributions from other domains to this element.
Definition: element.h:101
void giveNonlocalDepArryElementPlugin(GaussPoint *gp, std::set< int > &s)
Plugin for Element::ipEvaluator service to compile nonlocal dependency array for given element (in ca...
void migrate()
Migrates necessary local elements to remote processors, where they become remote elements needed to e...
IntegrationPointStatus * giveMaterialStatus()
Returns reference to associated material status (NULL if not defined).
Definition: gausspoint.h:205
Class representing communicator.
Definition: communicator.h:105
int packMigratingElementDependencies(Domain *d, ProcessCommunicator &pc)
#define CM_DefinitionGlobal
Definition: contextmode.h:48
virtual int giveElementPartition(int ielem)=0
Returns the new partition number assigned to local element after LB.
void setPartitionList(IntArray &pl)
Sets partition list of receiver.
Definition: element.h:1112
virtual contextIOResultType saveContext(DataStream &stream, ContextMode mode, void *obj=NULL)
Stores receiver state to output stream.
Definition: element.C:885
virtual int bcast(int root)
Initializes broadcast over collaborating processes.
Definition: combuff.h:354
int giveRank() const
Returns domain rank in a group of collaborating processes (0..groupSize-1)
Definition: engngm.h:1058
virtual Interface * giveInterface(InterfaceType t)
Interface requesting service.
Definition: femcmpnn.h:179
ClassFactory & classFactory
Definition: classfactory.C:59
virtual int write(const int *src, int n)
Writes count integer values from array pointed by data.
Definition: combuff.h:321
#define CM_Definition
Definition: contextmode.h:47
Element in active domain is only mirror of some remote element.
Definition: element.h:102
virtual int read(int *data, int count)
Reads count integer values into array pointed by data.
Definition: processcomm.h:91
void ipEvaluator(T *src, void(T::*f)(GaussPoint *gp))
Integration point evaluator, loops over receiver IP&#39;s and calls given function (passed as f parameter...
Definition: element.h:1162
The Communicator and corresponding buffers (represented by this class) are separated in order to allo...
Definition: communicator.h:60
Abstract base class representing the "problem" under consideration.
Definition: engngm.h:181
Domain * giveDomain()
Returns reference to its domain.
Definition: loadbalancer.h:160
int giveSize() const
Definition: intarray.h:203
void fastElementIPNonlocTableUpdater(GaussPoint *gp, IntArray &map)
Element plugin for ipEvaluator service to update nonlocal tables of its integration points from given...
the oofem namespace is to define a context or scope in which all oofem names are defined.
DofManager * giveDofManager(int i) const
Definition: element.C:514
void update()
Called after all wtps migrated their data.
DofManager * giveDofManager(int n)
Service for accessing particular domain dof manager.
Definition: domain.C:314
void fastRebuildNonlocalTables()
Rebuilds nonlocal integration tables in element gauss points using information in nonlocElementDepend...
Element * createElement(const char *name, int num, Domain *domain)
Creates new instance of element corresponding to given keyword.
Definition: classfactory.C:159
bool isShared()
Returns true if receiver is shared.
Definition: dofmanager.h:538
#define NonlocalMaterialWTP_END_DATA
End-of-data marker, used to identify end of data stream received.
DofManager is local, there are no contribution from other domains to this DofManager.
Definition: dofmanager.h:81
virtual contextIOResultType restoreContext(DataStream &stream, ContextMode mode, void *obj=NULL)
Restores the receiver state previously written in stream.
Definition: dofmanager.C:611
#define CM_UnknownDictState
Definition: contextmode.h:49
Class representing integration point in finite element program.
Definition: gausspoint.h:93
dofManagerParallelMode giveParallelMode() const
Return dofManagerParallelMode of receiver.
Definition: dofmanager.h:512
virtual Material * giveMaterial()
Definition: element.C:484

This page is part of the OOFEM documentation. Copyright (c) 2011 Borek Patzak
Project e-mail: info@oofem.org
Generated at Tue Jan 2 2018 20:07:30 for OOFEM by doxygen 1.8.11 written by Dimitri van Heesch, © 1997-2011