dimensionSetIO.C
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 2011-2015 OpenFOAM Foundation
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

\*---------------------------------------------------------------------------*/

#include "dimensionSet.H"
#include "IOstreams.H"
#include "dimensionedScalar.H"
#include <limits>

// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

Foam::dimensionSet::dimensionSet(Istream& is)
{
    is >> *this;
}


Foam::dimensionSet::tokeniser::tokeniser(Istream& is)
:
    is_(is),
    tokens_(100),
    start_(0),
    size_(0)
{}


// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //

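// Editor's note (not in the upstream source): the tokeniser keeps a small
// fixed-size circular buffer of tokens. push() appends at the logical end
// and, when the buffer is full, drops the oldest token by advancing start_;
// pop() removes from the front, and unpop() pushes a token back onto the
// front.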
void Foam::dimensionSet::tokeniser::push(const token& t)
{
    label end = (start_+size_)%tokens_.size();
    tokens_[end] = t;
    if (size_ == tokens_.size())
    {
        start_ = tokens_.fcIndex(start_);
    }
    else
    {
        size_++;
    }
}


Foam::token Foam::dimensionSet::tokeniser::pop()
{
    token t = tokens_[start_];
    start_ = tokens_.fcIndex(start_);
    --size_;
    return t;
}


void Foam::dimensionSet::tokeniser::unpop(const token& t)
{
    ++size_;
    start_ = tokens_.rcIndex(start_);
    tokens_[start_] = t;
}


bool Foam::dimensionSet::tokeniser::hasToken() const
{
    return size_ || is_.good();
}


bool Foam::dimensionSet::tokeniser::valid(char c)
{
    return
    (
        !isspace(c)
     && c != '"'    // string quote
     && c != '\''   // string quote
     && c != '/'    // div
     && c != ';'    // end statement
     && c != '{'    // beg subdict
     && c != '}'    // end subdict
     && c != '('    // beg expr
     && c != ')'    // end expr
     && c != '['    // beg dim
     && c != ']'    // end dim
     && c != '^'    // power
     && c != '*'    // mult
    );
}

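// Editor's note (not in the upstream source): priority() returns the binding
// strength used by parse(): 0 for non-operators, 2 for '*' and '/', and 3
// for '^', so exponentiation binds more tightly than multiplication and
// division.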
Foam::label Foam::dimensionSet::tokeniser::priority(const token& t)
{
    if (!t.isPunctuation())
    {
        return 0;
    }
    else if
    (
        t.pToken() == token::MULTIPLY
     || t.pToken() == token::DIVIDE
    )
    {
        return 2;
    }
    else if (t.pToken() == '^')
    {
        return 3;
    }
    else
    {
        return 0;
    }
}

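// Editor's note (not in the upstream source): splitWord() breaks a word such
// as "m^-2" into separate tokens: sub-words become word or number tokens,
// and each character rejected by valid() becomes a punctuation token.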
void Foam::dimensionSet::tokeniser::splitWord(const word& w)
{
    size_t start = 0;
    for (size_t i=0; i<w.size(); ++i)
    {
        if (!valid(w[i]))
        {
            if (i > start)
            {
                word subWord = w(start, i-start);
                if (isdigit(subWord[0]) || subWord[0] == token::SUBTRACT)
                {
                    push(token(readScalar(IStringStream(subWord)())));
                }
                else
                {
                    push(token(subWord));
                }
            }
            if (w[i] != token::SPACE)
            {
                if (isdigit(w[i]))
                {
                    push(token(readScalar(IStringStream(w[i])())));
                }
                else
                {
                    push(token::punctuationToken(w[i]));
                }
            }
            start = i+1;
        }
    }
    if (start < w.size())
    {
        word subWord = w(start, w.size()-start);
        if (isdigit(subWord[0]) || subWord[0] == token::SUBTRACT)
        {
            push(token(readScalar(IStringStream(subWord)())));
        }
        else
        {
            push(token(subWord));
        }
    }
}


Foam::token Foam::dimensionSet::tokeniser::nextToken()
{
    if (size_ == 0)
    {
        token t(is_);
        if (t.isWord())
        {
            splitWord(t.wordToken());
            return pop();
        }
        else
        {
            return t;
        }
    }
    else
    {
        return pop();
    }
}


void Foam::dimensionSet::tokeniser::putBack(const token& t)
{
    if (size_ == 0)
    {
        push(t);
    }
    else
    {
        unpop(t);
    }
}

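// Editor's note (not in the upstream source): round() snaps each exponent
// whose fractional part lies within tol of -1, 0 or 1 back onto an integer
// value; it is used after pow() to clean up exponents such as 1.9999999.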
void Foam::dimensionSet::round(const scalar tol)
{
    for (int i=0; i < dimensionSet::nDimensions; ++i)
    {
        scalar integralPart;
        scalar fractionalPart = std::modf(exponents_[i], &integralPart);

        if (mag(fractionalPart-1.0) <= tol)
        {
            exponents_[i] = 1.0+integralPart;
        }
        else if (mag(fractionalPart+1.0) <= tol)
        {
            exponents_[i] = -1.0+integralPart;
        }
        else if (mag(fractionalPart) <= tol)
        {
            exponents_[i] = integralPart;
        }
    }
}

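// Editor's note (not in the upstream source): parse() is a recursive,
// precedence-climbing parser for unit expressions such as "kg m^-1 s^-2" or
// "N/m^2". Adjacent symbols or numbers imply multiplication; lastPrior is
// the priority of the operator that triggered the current recursion, so a
// sub-expression is consumed only while the next operator binds more
// tightly.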
Foam::dimensionedScalar Foam::dimensionSet::parse
(
    const label lastPrior,
    tokeniser& tis,
    const HashTable<dimensionedScalar>& readSet
) const
{
    dimensionedScalar ds("", dimless, 1.0);

    // Get initial token
    token nextToken(tis.nextToken());

    // Store type of last token read. Used to detect two consecutive
    // symbols and assume multiplication
    bool haveReadSymbol = false;


    while (true)
    {
        if (nextToken.isWord())
        {
            const word& unitName = nextToken.wordToken();
            const dimensionedScalar& unitDim = readSet[unitName];
            ds.dimensions() *= unitDim.dimensions();
            ds.value() *= unitDim.value();
            haveReadSymbol = true;
        }
        else if (nextToken.isNumber())
        {
            // no dimensions, just value
            ds.value() *= nextToken.number();
            haveReadSymbol = true;
        }
        else if (nextToken.isPunctuation())
        {
            label nextPrior = tokeniser::priority(nextToken);

            if (nextToken.pToken() == token::BEGIN_SQR)
            {
                // No idea when this will happen
                tis.putBack(nextToken);
                return ds;
            }
            else if (nextToken.pToken() == token::END_SQR)
            {
                tis.putBack(nextToken);
                return ds;
            }
            else if (nextToken.pToken() == token::BEGIN_LIST)
            {
                dimensionedScalar sub(parse(nextPrior, tis, readSet));

                token t = tis.nextToken();
                if (!t.isPunctuation() || t.pToken() != token::END_LIST)
                {
                    FatalIOErrorInFunction
                    (
                        tis.stream()
                    )   << "Illegal token " << t << exit(FatalIOError);
                }

                ds.dimensions() *= sub.dimensions();
                ds.value() *= sub.value();

                haveReadSymbol = true;
            }
            else if (nextToken.pToken() == token::END_LIST)
            {
                tis.putBack(nextToken);
                return ds;
            }
            else if (nextToken.pToken() == token::MULTIPLY)
            {
                if (nextPrior > lastPrior)
                {
                    dimensionedScalar sub(parse(nextPrior, tis, readSet));

                    ds.dimensions() *= sub.dimensions();
                    ds.value() *= sub.value();
                }
                else
                {
                    // Restore token
                    tis.putBack(nextToken);
                    return ds;
                }
                haveReadSymbol = false;
            }
            else if (nextToken.pToken() == token::DIVIDE)
            {
                if (nextPrior > lastPrior)
                {
                    dimensionedScalar sub(parse(nextPrior, tis, readSet));

                    ds.dimensions() /= sub.dimensions();
                    ds.value() /= sub.value();
                }
                else
                {
                    tis.putBack(nextToken);
                    return ds;
                }
                haveReadSymbol = false;
            }
            else if (nextToken.pToken() == '^')
            {
                if (nextPrior > lastPrior)
                {
                    dimensionedScalar exp(parse(nextPrior, tis, readSet));

                    ds.dimensions().reset(pow(ds.dimensions(), exp.value()));
                    // Round to nearest integer if close to it
                    ds.dimensions().round(10*smallExponent);
                    ds.value() = Foam::pow(ds.value(), exp.value());
                }
                else
                {
                    tis.putBack(nextToken);
                    return ds;
                }
                haveReadSymbol = false;
            }
            else
            {
                FatalIOErrorInFunction
                (
                    tis.stream()
                )   << "Illegal token " << nextToken << exit(FatalIOError);
            }
        }
        else
        {
            FatalIOErrorInFunction
            (
                tis.stream()
            )   << "Illegal token " << nextToken << exit(FatalIOError);
        }


        if (!tis.hasToken())
        {
            break;
        }

        nextToken = tis.nextToken();
        if (nextToken.error())
        {
            break;
        }

        if (haveReadSymbol && (nextToken.isWord() || nextToken.isNumber()))
        {
            // Two consecutive symbols. Assume multiplication
            tis.putBack(nextToken);
            nextToken = token(token::MULTIPLY);
        }
    }

    return ds;
}

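// Editor's note (not in the upstream source): this read() accepts either the
// numeric form, e.g. [0 2 -2 0 0 0 0] (five or seven exponents), or the
// symbolic form, e.g. [m^2 s^-2], which is parsed against readSet; any scale
// factor picked up from the units is returned in multiplier.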
Foam::Istream& Foam::dimensionSet::read
(
    Istream& is,
    scalar& multiplier,
    const HashTable<dimensionedScalar>& readSet
)
{
    multiplier = 1.0;

    // Read beginning of dimensionSet
    token startToken(is);

    if (startToken != token::BEGIN_SQR)
    {
        FatalIOErrorInFunction
        (
            is
        )   << "expected a " << token::BEGIN_SQR << " in dimensionSet"
            << endl << "in stream " << is.info()
            << exit(FatalIOError);
    }

    // Read next token
    token nextToken(is);

    if (!nextToken.isNumber())
    {
        is.putBack(nextToken);

        tokeniser tis(is);

        dimensionedScalar ds(parse(0, tis, readSet));

        multiplier = ds.value();
        for (int i=0; i < dimensionSet::nDimensions; ++i)
        {
            exponents_[i] = ds.dimensions()[i];
        }
    }
    else
    {
        // Read first five dimensions
        exponents_[dimensionSet::MASS] = nextToken.number();
        for (int Dimension=1; Dimension<dimensionSet::CURRENT; Dimension++)
        {
            is >> exponents_[Dimension];
        }

        // Read next token
        token nextToken(is);

        // If next token is another number
        // read last two dimensions
        // and then read another token for the end of the dimensionSet
        if (nextToken.isNumber())
        {
            exponents_[dimensionSet::CURRENT] = nextToken.number();
            is >> nextToken;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = nextToken.number();
            is >> nextToken;
        }
        else
        {
            exponents_[dimensionSet::CURRENT] = 0;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = 0;
        }

        // Check end of dimensionSet
        if (nextToken != token::END_SQR)
        {
            FatalIOErrorInFunction
            (
                is
            )   << "expected a " << token::END_SQR << " in dimensionSet "
                << endl << "in stream " << is.info()
                << exit(FatalIOError);
        }
    }
    // Check state of Istream
    is.check("Istream& operator>>(Istream&, dimensionSet&)");

    return is;
}

Foam::Istream& Foam::dimensionSet::read
(
    Istream& is,
    scalar& multiplier
)
{
    return read(is, multiplier, unitSet());
}

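// Editor's note (not in the upstream source): this overload reads the
// symbolic form against a dictionary of named units. Each word is either a
// plain unit name or "symbol^exponent"; the dimensions of the looked-up
// units are accumulated into *this and their scale factors into multiplier.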
Foam::Istream& Foam::dimensionSet::read
(
    Istream& is,
    scalar& multiplier,
    const dictionary& readSet
)
{
    multiplier = 1.0;

    // Read beginning of dimensionSet
    token startToken(is);

    if (startToken != token::BEGIN_SQR)
    {
        FatalIOErrorInFunction
        (
            is
        )   << "expected a " << token::BEGIN_SQR << " in dimensionSet"
            << endl << "in stream " << is.info()
            << exit(FatalIOError);
    }

    // Read next token
    token nextToken(is);

    if (nextToken.isWord())
    {
        bool continueParsing = true;
        do
        {
            word symbolPow = nextToken.wordToken();
            if (symbolPow[symbolPow.size()-1] == token::END_SQR)
            {
                symbolPow = symbolPow(0, symbolPow.size()-1);
                continueParsing = false;
            }


            // Parse unit
            dimensionSet symbolSet(dimless);

            size_t index = symbolPow.find('^');
            if (index != string::npos)
            {
                word symbol = symbolPow(0, index);
                word exp = symbolPow(index+1, symbolPow.size()-index+1);
                scalar exponent = readScalar(IStringStream(exp)());

                dimensionedScalar s;
                s.read(readSet[symbol], readSet);

                symbolSet.reset(pow(s.dimensions(), exponent));
                // Round to nearest integer if close to it
                symbolSet.round(10*smallExponent);
                multiplier *= Foam::pow(s.value(), exponent);
            }
            else
            {
                dimensionedScalar s;
                s.read(readSet[symbolPow], readSet);

                symbolSet.reset(s.dimensions());
                multiplier *= s.value();
            }

            // Add dimensions without checking
            for (int i=0; i < dimensionSet::nDimensions; ++i)
            {
                exponents_[i] += symbolSet[i];
            }

            if (continueParsing)
            {
                nextToken = token(is);

                if (!nextToken.isWord() || nextToken == token::END_SQR)
                {
                    continueParsing = false;
                }
            }
        }
        while (continueParsing);
    }
    else
    {
        // Read first five dimensions
        exponents_[dimensionSet::MASS] = nextToken.number();
        for (int Dimension=1; Dimension<dimensionSet::CURRENT; Dimension++)
        {
            is >> exponents_[Dimension];
        }

        // Read next token
        token nextToken(is);

        // If next token is another number
        // read last two dimensions
        // and then read another token for the end of the dimensionSet
        if (nextToken.isNumber())
        {
            exponents_[dimensionSet::CURRENT] = nextToken.number();
            is >> nextToken;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = nextToken.number();
            is >> nextToken;
        }
        else
        {
            exponents_[dimensionSet::CURRENT] = 0;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = 0;
        }

        // Check end of dimensionSet
        if (nextToken != token::END_SQR)
        {
            FatalIOErrorInFunction
            (
                is
            )   << "expected a " << token::END_SQR << " in dimensionSet "
                << endl << "in stream " << is.info()
                << exit(FatalIOError);
        }
    }

    // Check state of Istream
    is.check("Istream& operator>>(Istream&, dimensionSet&)");

    return is;
}

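// Editor's note (not in the upstream source): write() emits the dimensions
// between '[' and ']'. If a valid set of write units is supplied and the
// stream is ASCII, the exponents are re-expressed as powers of those units
// and the associated scale factor is returned in multiplier; otherwise the
// seven raw exponents are written.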
Foam::Ostream& Foam::dimensionSet::write
(
    Ostream& os,
    scalar& multiplier,
    const dimensionSets& writeUnits
) const
{
    multiplier = 1.0;

    os << token::BEGIN_SQR;

    if (writeUnits.valid() && os.format() == IOstream::ASCII)
    {
        scalarField exponents(dimensionSet::nDimensions);
        for (int d=0; d<dimensionSet::nDimensions; d++)
        {
            exponents[d] = exponents_[d];
        }
        writeUnits.coefficients(exponents);

        bool hasPrinted = false;

        // Set precision to lots
        std::streamsize oldPrecision = os.precision
        (
            std::numeric_limits<scalar>::digits10
        );

        forAll(exponents, i)
        {
            if (mag(exponents[i]) > smallExponent)
            {
                const dimensionedScalar& ds = writeUnits.units()[i];

                if (hasPrinted)
                {
                    os << token::SPACE;
                }
                hasPrinted = true;
                os << ds.name();
                if (mag(exponents[i]-1) > smallExponent)
                {
                    os << '^' << exponents[i];

                    multiplier *= Foam::pow(ds.value(), exponents[i]);
                }
                else
                {
                    multiplier *= ds.value();
                }
            }
        }

        // Reset precision
        os.precision(oldPrecision);
    }
    else
    {
        for (int d=0; d<dimensionSet::nDimensions-1; d++)
        {
            os << exponents_[d] << token::SPACE;
        }
        os << exponents_[dimensionSet::nDimensions-1];
    }

    os << token::END_SQR;

    // Check state of Ostream
    os.check("Ostream& operator<<(Ostream&, const dimensionSet&)");

    return os;
}


Foam::Ostream& Foam::dimensionSet::write
(
    Ostream& os,
    scalar& multiplier
) const
{
    return write(os, multiplier, writeUnitSet());
}


// * * * * * * * * * * * * * * * IOstream Operators  * * * * * * * * * * * * //

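// Editor's note (not in the upstream source): operator>> reads with the
// default unitSet() and then rejects the entry if the accumulated multiplier
// differs from 1, i.e. scaled unit expressions are not allowed where a bare
// dimensionSet is expected.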
Foam::Istream& Foam::operator>>(Istream& is, dimensionSet& dset)
{
    scalar multiplier;
    dset.read(is, multiplier);

    if (mag(multiplier-1.0) > dimensionSet::smallExponent)
    {
        FatalIOErrorInFunction(is)
            << "Cannot use scaled units in dimensionSet"
            << exit(FatalIOError);
    }

    // Check state of Istream
    is.check("Istream& operator>>(Istream&, dimensionSet&)");

    return is;
}


Foam::Ostream& Foam::operator<<(Ostream& os, const dimensionSet& dset)
{
    scalar multiplier;
    dset.write(os, multiplier);

    // Check state of Ostream
    os.check("Ostream& operator<<(Ostream&, const dimensionSet&)");

    return os;
}


// ************************************************************************* //
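A minimal usage sketch (editor's addition, not part of the file above), assuming an initialised OpenFOAM environment so that unitSet() is available. It illustrates the two entry forms accepted by the readers defined in this file, plus direct construction from a stream via the Istream constructor above; the variable names are hypothetical.

    // Dictionary entry, numeric form: five or seven exponents in the order
    // [mass length time temperature moles current luminousIntensity]
    dimensions      [0 2 -2 0 0 0 0];

    // Dictionary entry, symbolic form, resolved against the named units
    dimensions      [m^2 s^-2];

    // In C++ (sketch):
    #include "dimensionSet.H"
    #include "IStringStream.H"

    Foam::IStringStream dimStr("[1 -1 -2 0 0 0 0]");
    Foam::dimensionSet pressureDims(dimStr);    // kg m^-1 s^-2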