libMesh
rb_construction_base.C
Go to the documentation of this file.
1 // rbOOmit: An implementation of the Certified Reduced Basis method.
2 // Copyright (C) 2009, 2010 David J. Knezevic
3 
4 // This file is part of rbOOmit.
5 
6 // rbOOmit is free software; you can redistribute it and/or
7 // modify it under the terms of the GNU Lesser General Public
8 // License as published by the Free Software Foundation; either
9 // version 2.1 of the License, or (at your option) any later version.
10 
11 // rbOOmit is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 // Lesser General Public License for more details.
15 
16 // You should have received a copy of the GNU Lesser General Public
17 // License along with this library; if not, write to the Free Software
18 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 
20 // C++ includes
21 #include <ctime>
22 #include <cstdlib> // *must* precede <cmath> for proper std:abs() on PGI, Sun Studio CC
23 #include <cmath>
24 
25 // rbOOmit includes
26 #include "libmesh/rb_construction_base.h"
27 
28 // libMesh includes
29 #include "libmesh/libmesh_logging.h"
30 #include "libmesh/numeric_vector.h"
31 #include "libmesh/equation_systems.h"
32 #include "libmesh/parallel.h"
33 #include "libmesh/petsc_linear_solver.h"
34 // Includes for template instantiation
35 #include "libmesh/condensed_eigen_system.h"
36 #include "libmesh/linear_implicit_system.h"
37 
38 namespace libMesh
39 {
40 
41 // ------------------------------------------------------------
42 // RBConstructionBase implementation
43 
44 
45 template <class Base>
47  const std::string & name_in,
48  const unsigned int number_in)
49  : Base(es, name_in, number_in),
50  serial_training_set(false),
51  training_parameters_initialized(false),
52  training_parameters_random_seed(-1) // by default, use std::time to seed RNG
53 {
54  training_parameters.clear();
55 }
56 
57 template <class Base>
59 {
60  this->clear();
61 }
62 
63 template <class Base>
65 {
66  // clear the parent data
67  Base::clear();
69 
70  std::map<std::string, NumericVector<Number> *>::iterator it = training_parameters.begin();
71  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters.end();
72 
73  for ( ; it != it_end; ++it)
74  {
75  NumericVector<Number> * training_vector = it->second;
76  delete training_vector;
77  training_vector = libmesh_nullptr;
78  }
79  training_parameters.clear();
80 }
81 
82 template <class Base>
84 {
85  Base::init_data();
86 
87  // Initialize the inner product storage vector, which is useful for
88  // storing intermediate results when evaluating inner products
90  inner_product_storage_vector->init (this->n_dofs(), this->n_local_dofs(), false, PARALLEL);
91 }
92 
93 template <class Base>
95  std::pair<numeric_index_type, Real> & error_pair)
96 {
97  // Set error_pair.second to the maximum global value and also
98  // find which processor contains the maximum value
99  unsigned int proc_ID_index;
100  communicator.maxloc(error_pair.second, proc_ID_index);
101 
102  // Then broadcast error_pair.first from proc_ID_index
103  communicator.broadcast(error_pair.first, proc_ID_index);
104 }
105 
106 template <class Base>
108 {
110 
111  if (training_parameters.empty())
112  return 0;
113 
114  return training_parameters.begin()->second->size();
115 }
116 
117 template <class Base>
119 {
121  return training_parameters.begin()->second->local_size();
122 }
123 
124 template <class Base>
126 {
128  return training_parameters.begin()->second->first_local_index();
129 }
130 
131 template <class Base>
133 {
135  return training_parameters.begin()->second->last_local_index();
136 }
137 
138 template <class Base>
140 {
142 }
143 
144 template <class Base>
146 {
148 
149  libmesh_assert( (this->get_first_local_training_index() <= index) &&
150  (index < this->get_last_local_training_index()) );
151 
152  RBParameters params;
153  std::map<std::string, NumericVector<Number> *>::const_iterator it = training_parameters.begin();
154  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters.end();
155  for ( ; it != it_end; ++it)
156  {
157  std::string param_name = it->first;
158  Real param_value = libmesh_real( ( *(it->second) )(index) );
159 
160  params.set_value(param_name, param_value);
161  }
162 
163  return params;
164 }
165 
166 template <class Base>
168 {
170 
171  processor_id_type root_id = 0;
172  if ((this->get_first_local_training_index() <= index) &&
173  (index < this->get_last_local_training_index()))
174  {
175  // Set parameters on only one processor
177 
178  // set root_id, only non-zero on one processor
179  root_id = this->processor_id();
180  }
181 
182  // broadcast
183  this->comm().max(root_id);
184  broadcast_parameters(root_id);
185 }
186 
187 template <class Base>
189  const RBParameters & mu_max,
190  unsigned int n_training_samples,
191  std::map<std::string,bool> log_param_scale,
192  bool deterministic)
193 {
194  // Print out some info about the training set initialization
195  libMesh::out << "Initializing training parameters with "
196  << (deterministic ? "deterministic " : "random " )
197  << "training set..." << std::endl;
198 
199  {
200  std::map<std::string,bool>::iterator it = log_param_scale.begin();
201  std::map<std::string,bool>::const_iterator it_end = log_param_scale.end();
202  for (; it != it_end; ++it)
203  {
204  libMesh::out << "Parameter "
205  << it->first
206  << ": log scaling = "
207  << it->second
208  << std::endl;
209  }
210  }
211  libMesh::out << std::endl;
212 
213  if (deterministic)
214  {
216  log_param_scale,
218  n_training_samples,
219  mu_min,
220  mu_max,
222  }
223  else
224  {
225  // Generate random training samples for all parameters
227  log_param_scale,
229  n_training_samples,
230  mu_min,
231  mu_max,
234  }
235 
236  // For each parameter that only allows discrete values, we "snap" to the nearest
237  // allowable discrete value
238  if (get_n_discrete_params() > 0)
239  {
240  std::map<std::string, NumericVector<Number> *>::iterator it = training_parameters.begin();
241  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters.end();
242  for ( ; it != it_end; ++it)
243  {
244  std::string param_name = it->first;
245  if (is_discrete_parameter(param_name))
246  {
247  std::vector<Real> discrete_values =
248  get_discrete_parameter_values().find(param_name)->second;
249 
250  NumericVector<Number> * training_vector = it->second;
251 
252  for (numeric_index_type index=training_vector->first_local_index();
253  index<training_vector->last_local_index();
254  index++)
255  {
256  Real value = libmesh_real((*training_vector)(index));
257  Real nearest_discrete_value = get_closest_value(value, discrete_values);
258  training_vector->set(index, nearest_discrete_value);
259  }
260  }
261  }
262  }
263 
265 }
266 
267 template <class Base>
268 void RBConstructionBase<Base>::load_training_set(std::map<std::string, std::vector<Number>> & new_training_set)
269 {
270  // First, make sure that an initial training set has already been
271  // generated
273  libmesh_error_msg("Error: load_training_set cannot be used to initialize parameters");
274 
275  // Make sure that the training set has the correct number of parameters
276  if (new_training_set.size() != get_n_params())
277  libmesh_error_msg("Error: Incorrect number of parameters in load_training_set.");
278 
279  // Clear the training set
280  std::map<std::string, NumericVector<Number> *>::iterator it = training_parameters.begin();
281  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters.end();
282  for ( ; it != it_end; ++it)
283  {
284  NumericVector<Number> * training_vector = it->second;
285  delete training_vector;
286  training_vector = libmesh_nullptr;
287  }
288 
289  // Get the number of local and global training parameters
290  numeric_index_type n_local_training_samples =
291  cast_int<numeric_index_type>(new_training_set.begin()->second.size());
292  numeric_index_type n_global_training_samples = n_local_training_samples;
293  this->comm().sum(n_global_training_samples);
294 
295  it = training_parameters.begin();
296  for ( ; it != it_end; ++it)
297  {
298  it->second = NumericVector<Number>::build(this->comm()).release();
299  it->second->init(n_global_training_samples, n_local_training_samples, false, PARALLEL);
300  }
301 
302  it = training_parameters.begin();
303  for ( ; it != it_end; ++it)
304  {
305  std::string param_name = it->first;
306  NumericVector<Number> * training_vector = it->second;
307 
308  numeric_index_type first_index = training_vector->first_local_index();
309  for (numeric_index_type i=0; i<n_local_training_samples; i++)
310  {
311  numeric_index_type index = first_index + i;
312  training_vector->set(index, new_training_set[param_name][i]);
313  }
314  }
315 }
316 
317 
318 template <class Base>
320  std::map<std::string, bool> log_param_scale,
321  std::map<std::string, NumericVector<Number> *> & training_parameters_in,
322  unsigned int n_training_samples_in,
323  const RBParameters & min_parameters,
324  const RBParameters & max_parameters,
326  bool serial_training_set)
327 {
328  libmesh_assert_equal_to ( min_parameters.n_parameters(), max_parameters.n_parameters() );
329  const unsigned int num_params = min_parameters.n_parameters();
330 
331  // Clear training_parameters_in
332  {
333  std::map<std::string, NumericVector<Number> *>::iterator it = training_parameters_in.begin();
334  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters_in.end();
335 
336  for ( ; it != it_end; ++it)
337  {
338  NumericVector<Number> * training_vector = it->second;
339  delete training_vector;
340  training_vector = libmesh_nullptr;
341  }
342  training_parameters_in.clear();
343  }
344 
345  if (num_params == 0)
346  return;
347 
348  if (training_parameters_random_seed < 0)
349  {
350  if (!serial_training_set)
351  {
352  // seed the random number generator with the system time
353  // and the processor ID so that the seed is different
354  // on different processors
355  std::srand( static_cast<unsigned>( std::time(0)*(1+communicator.rank()) ));
356  }
357  else
358  {
359  // seed the random number generator with the system time
360  // only so that the seed is the same on all processors
361  //
362  // Note that we broadcast the time on processor 0 to make
363  // sure all processors agree.
364  unsigned int current_time = static_cast<unsigned>( std::time(0) );
365  communicator.broadcast(current_time, 0);
366  std::srand(current_time);
367  }
368  }
369  else
370  {
371  if (!serial_training_set)
372  {
373  // seed the random number generator with the provided value
374  // and the processor ID so that the seed is different
375  // on different processors
376  std::srand( static_cast<unsigned>( training_parameters_random_seed*(1+communicator.rank()) ));
377  }
378  else
379  {
380  // seed the random number generator with the provided value
381  // so that the seed is the same on all processors
382  std::srand( static_cast<unsigned>( training_parameters_random_seed ));
383  }
384  }
385 
386  // initialize training_parameters_in
387  {
388  RBParameters::const_iterator it = min_parameters.begin();
389  RBParameters::const_iterator it_end = min_parameters.end();
390  for ( ; it != it_end; ++it)
391  {
392  std::string param_name = it->first;
393  training_parameters_in[param_name] = NumericVector<Number>::build(communicator).release();
394 
395  if (!serial_training_set)
396  {
397  // Calculate the number of training parameters local to this processor
398  unsigned int n_local_training_samples;
399  unsigned int quotient = n_training_samples_in/communicator.size();
400  unsigned int remainder = n_training_samples_in%communicator.size();
401  if (communicator.rank() < remainder)
402  n_local_training_samples = (quotient + 1);
403  else
404  n_local_training_samples = quotient;
405 
406  training_parameters_in[param_name]->init(n_training_samples_in, n_local_training_samples, false, PARALLEL);
407  }
408  else
409  {
410  training_parameters_in[param_name]->init(n_training_samples_in, false, SERIAL);
411  }
412  }
413  }
414 
415  // finally, set the values
416  {
417  std::map<std::string, NumericVector<Number> *>::iterator it = training_parameters_in.begin();
418  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters_in.end();
419 
420  for ( ; it != it_end; ++it)
421  {
422  std::string param_name = it->first;
423  NumericVector<Number> * training_vector = it->second;
424 
425  numeric_index_type first_index = training_vector->first_local_index();
426  for (numeric_index_type i=0; i<training_vector->local_size(); i++)
427  {
428  numeric_index_type index = first_index + i;
429  Real random_number = ((double)std::rand())/RAND_MAX; // in range [0,1]
430 
431  // Generate log10 scaled training parameters
432  if (log_param_scale[param_name])
433  {
434  Real log_min = log10(min_parameters.get_value(param_name));
435  Real log_range = log10(max_parameters.get_value(param_name) / min_parameters.get_value(param_name));
436 
437  training_vector->set(index, pow(10., log_min + random_number*log_range ) );
438  }
439  // Generate linearly scaled training parameters
440  else
441  {
442  training_vector->set(index, random_number*(max_parameters.get_value(param_name) - min_parameters.get_value(param_name))
443  + min_parameters.get_value(param_name));
444  }
445  }
446  }
447  }
448 }
449 
450 template <class Base>
452  std::map<std::string, bool> log_param_scale,
453  std::map<std::string, NumericVector<Number> *> & training_parameters_in,
454  unsigned int n_training_samples_in,
455  const RBParameters & min_parameters,
456  const RBParameters & max_parameters,
457  bool serial_training_set)
458 {
459  libmesh_assert_equal_to ( min_parameters.n_parameters(), max_parameters.n_parameters() );
460  const unsigned int num_params = min_parameters.n_parameters();
461 
462  if (num_params == 0)
463  return;
464 
465  if (num_params > 2)
466  {
467  libMesh::out << "ERROR: Deterministic training sample generation "
468  << " not implemented for more than two parameters." << std::endl;
469  libmesh_not_implemented();
470  }
471 
472  // Clear training_parameters_in
473  {
474  std::map<std::string, NumericVector<Number> *>::iterator it = training_parameters_in.begin();
475  std::map<std::string, NumericVector<Number> *>::const_iterator it_end = training_parameters_in.end();
476 
477  for ( ; it != it_end; ++it)
478  {
479  NumericVector<Number> * training_vector = it->second;
480  delete training_vector;
481  training_vector = libmesh_nullptr;
482  }
483  }
484 
485  // Initialize training_parameters_in
486  {
487  RBParameters::const_iterator it = min_parameters.begin();
488  RBParameters::const_iterator it_end = min_parameters.end();
489  for ( ; it != it_end; ++it)
490  {
491  std::string param_name = it->first;
492  training_parameters_in[param_name] = NumericVector<Number>::build(communicator).release();
493 
494  if (!serial_training_set)
495  {
496  // Calculate the number of training parameters local to this processor
497  unsigned int n_local_training_samples;
498  unsigned int quotient = n_training_samples_in/communicator.size();
499  unsigned int remainder = n_training_samples_in%communicator.size();
500  if (communicator.rank() < remainder)
501  n_local_training_samples = (quotient + 1);
502  else
503  n_local_training_samples = quotient;
504 
505  training_parameters_in[param_name]->init(n_training_samples_in, n_local_training_samples, false, PARALLEL);
506  }
507  else
508  {
509  training_parameters_in[param_name]->init(n_training_samples_in, false, SERIAL);
510  }
511  }
512  }
513 
514  if (num_params == 1)
515  {
516  NumericVector<Number> * training_vector = training_parameters_in.begin()->second;
517  bool use_log_scaling = log_param_scale.begin()->second;
518  Real min_param = min_parameters.begin()->second;
519  Real max_param = max_parameters.begin()->second;
520 
521  numeric_index_type first_index = training_vector->first_local_index();
522  for (numeric_index_type i=0; i<training_vector->local_size(); i++)
523  {
524  numeric_index_type index = first_index+i;
525  if (use_log_scaling)
526  {
527  Real epsilon = 1.e-6; // Prevent rounding errors triggering asserts
528  Real log_min = log10(min_param + epsilon);
529  Real log_range = log10( (max_param-epsilon) / (min_param+epsilon) );
530  Real step_size = log_range /
531  std::max((unsigned int)1,(n_training_samples_in-1));
532 
533  if (index<(n_training_samples_in-1))
534  {
535  training_vector->set(index, pow(10., log_min + index*step_size ));
536  }
537  else
538  {
539  // due to rounding error, the last parameter can be slightly
540  // bigger than max_parameters, hence snap back to the max
541  training_vector->set(index, max_param);
542  }
543  }
544  else
545  {
546  // Generate linearly scaled training parameters
547  Real step_size = (max_param - min_param) /
548  std::max((unsigned int)1,(n_training_samples_in-1));
549  training_vector->set(index, index*step_size + min_param);
550  }
551  }
552  }
553 
554 
555  // This is for two parameters
556  if (num_params == 2)
557  {
558  // First make sure n_training_samples_in is a square number
559  unsigned int n_training_parameters_per_var = static_cast<unsigned int>( std::sqrt(static_cast<Real>(n_training_samples_in)) );
560  if ((n_training_parameters_per_var*n_training_parameters_per_var) != n_training_samples_in)
561  libmesh_error_msg("Error: Number of training parameters = " \
562  << n_training_samples_in \
563  << ".\n" \
564  << "Deterministic training set generation with two parameters requires\n " \
565  << "the number of training parameters to be a perfect square.");
566 
567  // make a matrix to store all the parameters, put them in vector form afterwards
568  std::vector<std::vector<Real>> training_parameters_matrix(num_params);
569 
570  RBParameters::const_iterator it = min_parameters.begin();
571  RBParameters::const_iterator it_end = min_parameters.end();
572  unsigned int i = 0;
573  for ( ; it != it_end; ++it)
574  {
575  std::string param_name = it->first;
576  Real min_param = it->second;
577  bool use_log_scaling = log_param_scale[param_name];
578  Real max_param = max_parameters.get_value(param_name);
579 
580  training_parameters_matrix[i].resize(n_training_parameters_per_var);
581 
582  for (unsigned int j=0; j<n_training_parameters_per_var; j++)
583  {
584  // Generate log10 scaled training parameters
585  if (use_log_scaling)
586  {
587  Real epsilon = 1.e-6; // Prevent rounding errors triggering asserts
588  Real log_min = log10(min_param + epsilon);
589  Real log_range = log10( (max_param-epsilon) / (min_param+epsilon) );
590  Real step_size = log_range /
591  std::max((unsigned int)1,(n_training_parameters_per_var-1));
592 
593  if (j<(n_training_parameters_per_var-1))
594  {
595  training_parameters_matrix[i][j] = pow(10., log_min + j*step_size );
596  }
597  else
598  {
599  // due to rounding error, the last parameter can be slightly
600  // bigger than max_parameters, hence snap back to the max
601  training_parameters_matrix[i][j] = max_param;
602  }
603  }
604  else
605  {
606  // Generate linearly scaled training parameters
607  Real step_size = (max_param - min_param) /
608  std::max((unsigned int)1,(n_training_parameters_per_var-1));
609  training_parameters_matrix[i][j] = j*step_size + min_param;
610  }
611 
612  }
613  i++;
614  }
615 
616  // now load into training_samples_in:
617  std::map<std::string, NumericVector<Number> *>::iterator new_it = training_parameters_in.begin();
618 
619  NumericVector<Number> * training_vector_0 = new_it->second;
620  ++new_it;
621  NumericVector<Number> * training_vector_1 = new_it->second;
622 
623  for (unsigned int index1=0; index1<n_training_parameters_per_var; index1++)
624  {
625  for (unsigned int index2=0; index2<n_training_parameters_per_var; index2++)
626  {
627  unsigned int index = index1*n_training_parameters_per_var + index2;
628 
629  if ((training_vector_0->first_local_index() <= index) &&
630  (index < training_vector_0->last_local_index()))
631  {
632  training_vector_0->set(index, training_parameters_matrix[0][index1]);
633  training_vector_1->set(index, training_parameters_matrix[1][index2]);
634  }
635  }
636  }
637 
638  // libMesh::out << "n_training_samples = " << n_training_samples_in << std::endl;
639  // for (unsigned int index=0; index<n_training_samples_in; index++)
640  // {
641  // libMesh::out << "training parameters for index="<<index<<":"<<std::endl;
642  // for (unsigned int param=0; param<num_params; param++)
643  // {
644  // libMesh::out << " " << (*training_parameters_in[param])(index);
645  // }
646  // libMesh::out << std::endl << std::endl;
647  // }
648 
649  }
650 }
651 
652 
653 template <class Base>
655 {
656  libmesh_assert_less (proc_id, this->n_processors());
657 
658  // create a copy of the current parameters
659  RBParameters current_parameters = get_parameters();
660 
661  // copy current_parameters to current_parameters_vector in order to broadcast
662  std::vector<Real> current_parameters_vector;
663 
664  RBParameters::const_iterator it = current_parameters.begin();
665  RBParameters::const_iterator it_end = current_parameters.end();
666 
667  for ( ; it != it_end; ++it)
668  {
669  current_parameters_vector.push_back(it->second);
670  }
671 
672  // do the broadcast
673  this->comm().broadcast(current_parameters_vector, proc_id);
674 
675  // update the copy of the RBParameters object
676  it = current_parameters.begin();
677  unsigned int count = 0;
678  for ( ; it != it_end; ++it)
679  {
680  std::string param_name = it->first;
681  current_parameters.set_value(param_name, current_parameters_vector[count]);
682  count++;
683  }
684 
685  // set the parameters globally
686  set_parameters(current_parameters);
687 }
688 
689 template <class Base>
691 {
692  this->training_parameters_random_seed = seed;
693 }
694 
695 // Template specializations
696 
697 // EigenSystem is only defined if we have SLEPc
698 #if defined(LIBMESH_HAVE_SLEPC)
700 #endif
701 
703 
704 } // namespace libMesh
T libmesh_real(T a)
static void generate_training_parameters_random(const Parallel::Communicator &communicator, std::map< std::string, bool > log_param_scale, std::map< std::string, NumericVector< Number > * > &training_parameters_in, unsigned int n_training_samples_in, const RBParameters &min_parameters, const RBParameters &max_parameters, int training_parameters_random_seed=-1, bool serial_training_set=false)
Static helper function for generating a randomized set of parameters.
virtual void clear()
Clear all the data structures associated with the system.
numeric_index_type get_local_n_training_samples() const
Get the total number of training samples local to this processor.
bool training_parameters_initialized
Boolean flag to indicate whether or not the parameter ranges have been initialized.
This is the EquationSystems class.
Encapsulates the MPI_Comm object.
Definition: parallel.h:657
UniquePtr< NumericVector< Number > > inner_product_storage_vector
We keep an extra temporary vector that is useful for performing inner products (avoids unnecessary me...
virtual numeric_index_type last_local_index() const =0
void broadcast_parameters(unsigned int proc_id)
Broadcasts parameters on processor proc_id to all processors.
unsigned int size() const
Definition: parallel.h:726
void maxloc(T &r, unsigned int &max_id) const
Take a local variable and replace it with the maximum of its values on all processors, returning the minimum rank of a processor which originally held the maximum value.
Real get_value(const std::string &param_name) const
Get the value of the specific parameter.
Definition: rb_parameters.C:45
MPI_Comm communicator
Communicator object for talking with subsets of processors.
Definition: parallel.h:181
unsigned int get_n_params() const
Get the number of parameters.
uint8_t processor_id_type
Definition: id_types.h:99
const class libmesh_nullptr_t libmesh_nullptr
unsigned int n_parameters() const
Get the number of parameters that have been added.
Definition: rb_parameters.C:62
bool is_discrete_parameter(const std::string &mu_name) const
Is parameter mu_name discrete?
static void get_global_max_error_pair(const Parallel::Communicator &communicator, std::pair< numeric_index_type, Real > &error_pair)
Static function to return the error pair (index,error) that is corresponds to the largest error on al...
The libMesh namespace provides an interface to certain functionality in the library.
static Real get_closest_value(Real value, const std::vector< Real > &list_of_values)
long double max(long double a, double b)
virtual void set_params_from_training_set_and_broadcast(unsigned int index)
Load the specified training parameter and then broadcast to all processors.
libmesh_assert(j)
numeric_index_type get_n_training_samples() const
Get the total number of training samples.
static UniquePtr< NumericVector< T > > build(const Parallel::Communicator &comm, const SolverPackage solver_package=libMesh::default_solver_package())
Builds a NumericVector on the processors in communicator comm using the linear solver package specifi...
dof_id_type numeric_index_type
Definition: id_types.h:92
void set_parameters(const RBParameters &params)
Set the current parameters to params.
void set_params_from_training_set(unsigned int index)
Set parameters to the RBParameters stored in index index of the training set.
RBConstructionBase(EquationSystems &es, const std::string &name, const unsigned int number)
Constructor.
const_iterator end() const
Get a constant iterator to the end of this RBParameters object.
Definition: rb_parameters.C:85
virtual void initialize_training_parameters(const RBParameters &mu_min, const RBParameters &mu_max, unsigned int n_training_parameters, std::map< std::string, bool > log_param_scale, bool deterministic=true)
Initialize the parameter ranges and indicate whether deterministic or random training parameters shou...
std::map< std::string, NumericVector< Number > * > training_parameters
The training samples.
void broadcast(T &data, const unsigned int root_id=0) const
Take a local value and broadcast it to all processors.
This class is part of the rbOOmit framework.
Definition: rb_parameters.h:42
double pow(double a, int b)
virtual numeric_index_type local_size() const =0
unsigned int get_n_discrete_params() const
Get the number of discrete parameters.
DIE A HORRIBLE DEATH HERE typedef LIBMESH_DEFAULT_SCALAR_TYPE Real
numeric_index_type get_first_local_training_index() const
Get the first local index of the training parameters.
void set_training_random_seed(unsigned int seed)
Set the seed that is used to randomly generate training parameters.
void set_value(const std::string &param_name, Real value)
Set the value of the specified parameter.
Definition: rb_parameters.C:57
RBParameters get_params_from_training_set(unsigned int index)
Return the RBParameters in index index of training set.
OStreamProxy out
static const bool value
Definition: xdr_io.C:108
numeric_index_type get_last_local_training_index() const
Get the last local index of the training parameters.
virtual ~RBConstructionBase()
Destructor.
virtual numeric_index_type first_local_index() const =0
bool serial_training_set
This boolean flag indicates whether or not the training set should be the same on all processors...
unsigned int rank() const
Definition: parallel.h:724
const std::map< std::string, std::vector< Real > > & get_discrete_parameter_values() const
Get a const reference to the discrete parameter values.
virtual void set(const numeric_index_type i, const T value)=0
Sets v(i) = value.
const_iterator begin() const
Get a constant iterator to beginning of this RBParameters object.
Definition: rb_parameters.C:80
int training_parameters_random_seed
If < 0, use std::time() * processor_id() to seed the random number generator for the training paramet...
const RBParameters & get_parameters() const
Get the current parameters.
virtual void init_data()
Initializes the member data fields associated with the system, so that, e.g., assemble() may be used...
std::map< std::string, Real >::const_iterator const_iterator
Definition: rb_parameters.h:57
virtual void load_training_set(std::map< std::string, std::vector< Number >> &new_training_set)
Overwrite the training parameters with new_training_set.
processor_id_type processor_id()
Definition: libmesh_base.h:96
virtual void clear()
Clear all the data structures associated with the system.
static void generate_training_parameters_deterministic(const Parallel::Communicator &communicator, std::map< std::string, bool > log_param_scale, std::map< std::string, NumericVector< Number > * > &training_parameters_in, unsigned int n_training_samples_in, const RBParameters &min_parameters, const RBParameters &max_parameters, bool serial_training_set=false)
Static helper function for generating a deterministic set of parameters.
processor_id_type n_processors()
Definition: libmesh_base.h:88