sensors.cpp
/********************************************************************************
 *  FARSA Experiments Library                                                   *
 *  Copyright (C) 2007-2012                                                     *
 *  Gianluca Massera <emmegian@yahoo.it>                                        *
 *  Stefano Nolfi <stefano.nolfi@istc.cnr.it>                                   *
 *  Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it>                         *
 *  Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it>                           *
 *                                                                              *
 *  This program is free software; you can redistribute it and/or modify       *
 *  it under the terms of the GNU General Public License as published by       *
 *  the Free Software Foundation; either version 2 of the License, or          *
 *  (at your option) any later version.                                        *
 *                                                                              *
 *  This program is distributed in the hope that it will be useful,            *
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of             *
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the               *
 *  GNU General Public License for more details.                               *
 *                                                                              *
 *  You should have received a copy of the GNU General Public License          *
 *  along with this program; if not, write to the Free Software                *
 *  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA   *
 ********************************************************************************/

#include "sensors.h"
#include "configurationhelper.h"
#include "motorcontrollers.h"
#include "logger.h"
#include "graphicalwobject.h"
#include "arena.h"
#include <QStringList>
#include <QList>
#include <QtAlgorithms>
#include <limits>
#include <cmath>
#include <QLinkedList>
#include <QFile>
#include <QTextStream>
#include <QMutex>
#include <QColor>

namespace farsa {

//ObjectPositionSensor : begin implementation
// it returns the absolute coordinates of an object in the world
ObjectPositionSensor::ObjectPositionSensor(ConfigurationParameters& params, QString prefix) :
	Sensor(params, prefix) {
	neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator");
	objectName = ConfigurationHelper::getString( params, prefix+"object", "object" );
	QVector<double> vec1 = ConfigurationHelper::getVector( params, prefix+"bbMin" );
	QVector<double> vec2 = ConfigurationHelper::getVector( params, prefix+"bbMax" );
	if ( vec1.size() == 3 && vec2.size() == 3 ) {
		linearize = true;
		bbMin = wVector( vec1[0], vec1[1], vec1[2] );
		bbMax = wVector( vec2[0], vec2[1], vec2[2] );
	} else {
		linearize = false;
		if ( ! (vec1.isEmpty() && vec2.isEmpty()) ) {
			Logger::warning( QString("ObjectPositionSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
		}
	}

	// Declaring the resources that are needed here
	usableResources( QStringList() << objectName << neuronsIteratorResource );
}

ObjectPositionSensor::~ObjectPositionSensor() {
	// nothing to do
}

void ObjectPositionSensor::describe( QString type ) {
	Sensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor reading the three absolute coordinates (the position in the world frame) of an object" );
	d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
	d.describeString( "object" ).def( "object" ).props( IsMandatory ).help( "The name of the resource associated with the object to track with this sensor" );
	d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used to linearize the object position into [0,1]" );
	d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used to linearize the object position into [0,1]" );
}
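
// A hypothetical configuration fragment for this sensor (the group name and
// values are illustrative, not taken from a real experiment file):
//
//   [Sensor:0]
//   type = ObjectPositionSensor
//   object = cylinder
//   neuronsIterator = neuronsIterator
//   bbMin = -1.0 -1.0 0.0
//   bbMax = 1.0 1.0 1.0
//
// When bbMin and bbMax are both given as 3D points, each coordinate is
// linearized into [0,1]; otherwise the raw world coordinates are written to
// the input neurons.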

void ObjectPositionSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	WObject* object = getResource<WObject>( objectName );
	wVector pos = object->matrix().w_pos;
	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
		if ( linearize ) {
			// linearize into [0,1]
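			// (linearMap is assumed here to rescale [bbMin[i], bbMax[i]]
			// linearly onto [0,1], i.e. (pos[i] - bbMin[i]) / (bbMax[i] - bbMin[i]),
			// presumably clamping values that fall outside the bounding box)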
			evonetIt->setInput( linearMap( pos[i], bbMin[i], bbMax[i], 0, 1 ) );
		} else {
			evonetIt->setInput( pos[i] );
		}
	}
}

int ObjectPositionSensor::size() {
	return 3;
}

void ObjectPositionSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	if (changeType == Deleted) {
		resetNeededResourcesCheck();
		return;
	}

	if (resourceName == objectName) {
		// Nothing to do here, we get the object with getResource() in update()
	} else if (resourceName == neuronsIteratorResource) {
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
			evonetIt->setGraphicProperties( QString("obj")+QString::number(i), -10.0, 10.0, Qt::red );
		}
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

void ObjectPositionSensor::save( ConfigurationParameters& params, QString prefix )
{
	Sensor::save( params, prefix );
	params.startObjectParameters( prefix, "ObjectPositionSensor", this );
	params.createParameter(prefix, "neuronsIterator", neuronsIteratorResource);
	params.createParameter( prefix, "object", objectName );
	if ( linearize ) {
		params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
		params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
	}
}
//ObjectPositionSensor : end implementation

namespace __LinearCamera_internal {
	#ifndef GLMultMatrix
	#define GLMultMatrix glMultMatrixf
	// use glMultMatrixd instead when matrices store doubles
	#endif

	// The side of the small cube representing the camera in the 3D scene
	const float linearCameraCubeSide = 0.02f;

	// The length of the lines representing the camera receptors in the 3D scene
	const float linearCameraReceptorsLength = 0.1f;

	// The class rendering the linear camera in the 3D scene
	class LinearCameraGraphic : public GraphicalWObject
	{
	public:
		LinearCameraGraphic(WObject *object, const wMatrix& transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name = "unnamed") :
			GraphicalWObject(object->world(), name),
			m_object(object),
			m_transformation(transformation),
			m_minAngle(minAngle),
			m_maxAngle(maxAngle),
			m_numReceptors(numReceptors),
			m_receptorRange((m_maxAngle - m_minAngle) / double(m_numReceptors)),
			m_receptors(m_numReceptors, Qt::black)
		{
			// Attaching to object (which also becomes our owner)
			attachToObject(m_object, true);

			// We also use our own color and texture
			setUseColorTextureOfOwner(false);
			setTexture("");
			setColor(Qt::white);
		}

		virtual ~LinearCameraGraphic()
		{
		}

		// Sets the colors currently perceived by the receptors. This is
		// thread-safe with respect to the rendering thread
		void setPerceivedColors(const QVector<QColor>& receptors)
		{
			m_receptorsMutex.lock();
			m_receptors = receptors;
			m_receptorsMutex.unlock();
		}

	protected:
		// The function rendering the camera and the current state of its
		// receptors (called by the rendering engine)
		virtual void render(RenderWObject* renderer, QGLContext* gw)
		{
			// Bringing the frame of reference at the center of the camera
			wMatrix mtr = m_transformation * tm;
			glPushMatrix();
			renderer->container()->setupColorTexture(gw, renderer);
			GLMultMatrix(&mtr[0][0]);

			// First of all drawing the camera as a small white box. The face in the
			// direction of view (X axis) is painted half green: the green part is the
			// one in the direction of the upvector (Z axis)
			glBegin(GL_QUADS);
			const float hside = linearCameraCubeSide / 2.0;

			// front (top part)
			glColor3f(0.0, 1.0, 0.0);
			glNormal3f(1.0, 0.0, 0.0);
			glVertex3f( hside, -hside,  hside);
			glVertex3f( hside, -hside,    0.0);
			glVertex3f( hside,  hside,    0.0);
			glVertex3f( hside,  hside,  hside);

			// front (bottom part)
			glColor3f(1.0, 1.0, 1.0);
			glNormal3f(1.0, 0.0, 0.0);
			glVertex3f( hside, -hside,    0.0);
			glVertex3f( hside, -hside, -hside);
			glVertex3f( hside,  hside, -hside);
			glVertex3f( hside,  hside,    0.0);

			// back
			glNormal3f(-1.0, 0.0, 0.0);
			glVertex3f(-hside, -hside, -hside);
			glVertex3f(-hside, -hside,  hside);
			glVertex3f(-hside,  hside,  hside);
			glVertex3f(-hside,  hside, -hside);

			// top
			glNormal3f(0.0, 1.0, 0.0);
			glVertex3f(-hside,  hside,  hside);
			glVertex3f( hside,  hside,  hside);
			glVertex3f( hside,  hside, -hside);
			glVertex3f(-hside,  hside, -hside);

			// bottom
			glNormal3f(0.0, -1.0, 0.0);
			glVertex3f(-hside, -hside, -hside);
			glVertex3f( hside, -hside, -hside);
			glVertex3f( hside, -hside,  hside);
			glVertex3f(-hside, -hside,  hside);

			// right
			glNormal3f(0.0, 0.0, 1.0);
			glVertex3f( hside, -hside,  hside);
			glVertex3f(-hside, -hside,  hside);
			glVertex3f(-hside,  hside,  hside);
			glVertex3f( hside,  hside,  hside);

			// left
			glNormal3f(0.0, 0.0, -1.0);
			glVertex3f( hside, -hside, -hside);
			glVertex3f(-hside, -hside, -hside);
			glVertex3f(-hside,  hside, -hside);
			glVertex3f( hside,  hside, -hside);

			glEnd();

			// Now we draw white lines to separate the various sectors of the camera.
			// Disabling lighting here (we want pure lines no matter from where we look at them)
			glPushAttrib(GL_LIGHTING_BIT);
			glDisable(GL_LIGHTING);
			glLineWidth(2.5);
			glColor3f(1.0, 1.0, 1.0);

			// Drawing the lines
			glBegin(GL_LINES);
			for (unsigned int i = 0; i <= m_numReceptors; i++) {
				const double curAngle = m_minAngle + double(i) * m_receptorRange;

				const wVector lineEnd = wVector(cos(curAngle), sin(curAngle), 0.0).scale(linearCameraReceptorsLength);

				glVertex3f(0.0, 0.0, 0.0);
				glVertex3f(lineEnd.x, lineEnd.y, lineEnd.z);
			}
			glEnd();

			// Now drawing the state of receptors. Here we also have to lock the semaphore for
			// the m_receptors vector
			m_receptorsMutex.lock();

			// Drawing the status
			glBegin(GL_QUADS);
			glNormal3f(0.0, 1.0, 0.0);
			const double colorPatchAngle = m_receptorRange / 3.0;
			const double colorPatchMinLength = linearCameraReceptorsLength / 3.0;
			const double colorPatchMaxLength = 2.0 * linearCameraReceptorsLength / 3.0;
			for (unsigned int i = 0; i < m_numReceptors; i++) {
				const double curAngle = m_minAngle + double(i) * m_receptorRange;

				for (unsigned int c = 0; c < 3; c++) {
					const double startAngle = curAngle + double(c) * colorPatchAngle;
					const double endAngle = curAngle + double(c + 1) * colorPatchAngle;

					// Computing the four vertices
					const wVector v1 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMinLength);
					const wVector v2 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMaxLength);
					const wVector v3 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMaxLength);
					const wVector v4 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMinLength);

					// Setting the color
					switch (c) {
						case 0:
							glColor3f(m_receptors[i].redF(), 0.0, 0.0);
							break;
						case 1:
							glColor3f(0.0, m_receptors[i].greenF(), 0.0);
							break;
						case 2:
							glColor3f(0.0, 0.0, m_receptors[i].blueF());
							break;
						default:
							break;
					}

					// Drawing the patch
					glVertex3f(v1.x, v1.y, v1.z);
					glVertex3f(v2.x, v2.y, v2.z);
					glVertex3f(v3.x, v3.y, v3.z);
					glVertex3f(v4.x, v4.y, v4.z);
				}
			}
			glEnd();
			m_receptorsMutex.unlock();

			// Restoring lighting status
			glPopAttrib();

			glPopMatrix();
		}

		// The object the camera is attached to
		WObject* const m_object;

		// The camera transformation relative to the attached object
		const wMatrix m_transformation;

		// The minimum angle of the camera aperture
		const double m_minAngle;

		// The maximum angle of the camera aperture
		const double m_maxAngle;

		// The number of receptors of the camera
		const unsigned int m_numReceptors;

		// The angular range covered by a single receptor
		const double m_receptorRange;

		// The colors currently perceived by the receptors
		QVector<QColor> m_receptors;

		// The mutex protecting m_receptors from concurrent accesses
		QMutex m_receptorsMutex;
	};
}

using namespace __LinearCamera_internal;

LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, QColor backgroundColor) :
	ConcurrentResourcesUser(),
	m_receptors(numReceptors),
	m_object(obj),
	m_transformation(mtr),
	m_aperture((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture)),
	m_numReceptors(numReceptors),
	m_backgroundColor(backgroundColor),
	m_apertureMin(-m_aperture / 2.0),
	m_apertureMax(m_aperture / 2.0),
	m_receptorRange(m_aperture / double(m_numReceptors)),
	m_arena(NULL),
	m_drawCamera(false),
	m_graphicalCamera(NULL)
{
	// Stating which resources we use here
	addUsableResource("arena");
}
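
// A minimal usage sketch (hypothetical: it assumes a WObject* robot is
// available and that wMatrix::identity() yields the identity transform):
//
//   LinearCamera cam(robot, wMatrix::identity(), PI_GRECO / 2.0, 8, QColor(Qt::black));
//   cam.drawCamera(true); // also show the camera in the 3D scene
//   ...
//   cam.update(); // fill the receptors with the currently perceived colors
//
// The aperture is clamped to [0, 2*PI] by the constructor, and the view field
// [-aperture/2, aperture/2] is split evenly among the receptors.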

LinearCamera::~LinearCamera()
{
	// Nothing to do here
}

namespace {
	// This namespace contains some structures used in the LinearCamera::update() function

	// The structure containing a color and the range of the camera field hit by this color.
	// It also contains the distance from the camera for ordering.
	struct ColorRangeAndDistance
	{
		ColorRangeAndDistance() :
			color(),
			minAngle(0.0),
			maxAngle(0.0),
			distance(0.0)
		{
		}

		ColorRangeAndDistance(QColor c, double min, double max, double d) :
			color(c),
			minAngle(min),
			maxAngle(max),
			distance(d)
		{
		}

		// This is to order objects of this type
		bool operator<(const ColorRangeAndDistance& other) const
		{
			return (distance < other.distance);
		}

		QColor color;
		double minAngle;
		double maxAngle;
		double distance;
	};

	// A helper class to ease computations with multiple intervals. This class starts with a single
	// interval and then allows to remove portions. When removing an interval, it returns the portion
	// of the initial range that was actually removed
	class MultiInterval
	{
	private:
		struct SingleInterval
		{
			double start;
			double end;
		};

	public:
		MultiInterval() :
			m_originalSize(0.0),
			m_intervals()
		{
		}

		void initMultiInterval(double start, double end)
		{
			m_originalSize = end - start;

			SingleInterval i;
			i.start = start;
			i.end = end;
			m_intervals.append(i);
		}

		double removeInterval(double start, double end)
		{
			double removedSize = 0.0;

			// We exit from the cycle when both these variables are true: intervals are ordered so,
			// if we have found both the interval for start and the one for end, we can exit
			bool foundStartInIntervals = false;
			bool foundEndInIntervals = false;
			QLinkedList<SingleInterval>::iterator it = m_intervals.begin();
			while ((it != m_intervals.end()) && (!foundStartInIntervals || !foundEndInIntervals)) {
				if ((start <= it->start) && (end >= it->end)) {
					// Removing the whole interval and continuing
					removedSize += it->end - it->start;
					it = m_intervals.erase(it);
				} else if ((start >= it->start) && (start < it->end) && (end > it->start) && (end <= it->end)) {
					// Here we have to split the interval in two, putting the two new intervals
					// in place of the old one. QLinkedList::insert() inserts in front of the
					// given iterator, so we insert i2 first and then i1 to keep the list
					// ordered by start
					removedSize += end - start;
					SingleInterval i1, i2;
					i1.start = it->start;
					i1.end = start;
					i2.start = end;
					i2.end = it->end;
					it = m_intervals.erase(it);
					it = m_intervals.insert(it, i2);
					it = m_intervals.insert(it, i1);

					// The removed range was completely inside a single interval, so no other
					// interval can be intersected and we can exit from the cycle
					foundStartInIntervals = true;
					foundEndInIntervals = true;
				} else if ((start > it->start) && (start < it->end)) {
					// Here we have to reduce the interval by setting the new end
					removedSize += it->end - start;
					it->end = start;
					foundStartInIntervals = true;
					++it;
				} else if ((end > it->start) && (end < it->end)) {
					// Here we have to reduce the interval by setting the new start
					removedSize += end - it->start;
					it->start = end;
					foundEndInIntervals = true;
					++it;
				} else {
					// Simply incrementing the iterator
					++it;
				}
			}

			return removedSize / m_originalSize;
		}

	private:
		double m_originalSize;
		// Intervals will always be ordered from the one with the lowest start to the one with the highest start.
		// Moreover two intervals will never intersect
		QLinkedList<SingleInterval> m_intervals;
	};
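
	// Semantics example: after initMultiInterval(0.0, 1.0), a call to
	// removeInterval(0.25, 0.5) returns 0.25 (a quarter of the original range
	// was removed) and leaves [0.0, 0.25] and [0.5, 1.0] in the list; calling
	// removeInterval(0.25, 0.5) again returns 0.0, since that portion is
	// already gone. LinearCamera::update() relies on this to assign each
	// portion of a receptor to the nearest color only.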

	// A helper structure storing information about the colors in a single receptor. curInterval
	// keeps track of the portion of the receptor for which we already know the color, while
	// colorsAndFractions is the list of colors and the fraction of the receptor occupied by each color
	struct ColorsInReceptor
	{
		MultiInterval curInterval;

		struct ColorAndFraction {
			ColorAndFraction() :
				color(),
				fraction(0.0)
			{
			}

			ColorAndFraction(QColor c, double f) :
				color(c),
				fraction(f)
			{
			}

			QColor color;
			double fraction;
		};
		QList<ColorAndFraction> colorsAndFractions;
	};
}

void LinearCamera::update()
{
#ifdef __GNUC__
	#warning AS SOON AS THE ROBOTS ARE IN THE OBJECTS LIST, REMEMBER TO EXCLUDE THE OBJECT THE CAMERA IS ATTACHED TO WHEN COMPUTING THE ACTIVATION
#endif
	// Getting the list of objects from the arena (if we have the pointer to the arena)
	if (m_arena == NULL) {
		m_receptors.fill(m_backgroundColor);

		return;
	}
	const QVector<PhyObject2DWrapper*>& objectsList = m_arena->getObjects();

	// If no object is present, we can fill the receptors list with background colors and return
	if (objectsList.size() == 0) {
		m_receptors.fill(m_backgroundColor);

		return;
	}

	// Updating the matrix with the current camera position
	wMatrix currentMtr = m_transformation * m_object->matrix();

	// First of all we need to compute which color hits each receptor

	// Now filling the list with colors, ranges and distances. If an object is perceived at the
	// extremities of the aperture, it is split in two different ColorRangeAndDistance objects
	QList<ColorRangeAndDistance> colorsRangesAndDistances;

	// For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<); however
	// this is not correct (occlusion doesn't work well) and so should be changed
	for (int i = 0; i < objectsList.size(); i++) {
		const QColor color = objectsList[i]->color();
		double minAngle;
		double maxAngle;
		double distance;
		objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, minAngle, maxAngle, distance);

		// computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
		if (distance < 0.0) {
			continue;
		}

		// If minAngle is greater than maxAngle, we split the range in two, so that we do not have
		// to handle special cases in the subsequent part of the function. Here we also check
		// whether the object is completely outside the view field (in that case we don't add it
		// to the list). We only check that the object is at least partially visible; we don't
		// clamp the limits to be within the view field
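		// (e.g. an object behind the angular discontinuity of the view field,
		// with minAngle = 3.0 and maxAngle = -3.0, is stored as the two ranges
		// [3.0, m_apertureMax] and [m_apertureMin, -3.0])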
		if (minAngle > maxAngle) {
			if ((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) {
				colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, m_apertureMax, distance));
			}
			if ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax)) {
				colorsRangesAndDistances.append(ColorRangeAndDistance(color, m_apertureMin, maxAngle, distance));
			}
		} else {
			if (((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) || ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax))) {
				colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, maxAngle, distance));
			}
		}
	}

	// Ordering colors by distance from the camera
	qSort(colorsRangesAndDistances);

	// Now we can add the background color at the end of the list. It covers the whole view field
	// to make sure all receptors are filled with a valid color
	colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, m_apertureMin, m_apertureMax, std::numeric_limits<double>::infinity()));

	// The next step is to calculate the fraction of each receptor covered by each color
	// in the colorsRangesAndDistances list
	QVector<ColorsInReceptor> colorsInReceptors(m_numReceptors);
	for (QList<ColorRangeAndDistance>::const_iterator it = colorsRangesAndDistances.begin(); it != colorsRangesAndDistances.end(); ++it) {
		// Computing the indexes of the receptors hit by this color
		const int minIndex = max(0, floor((it->minAngle - m_apertureMin) / m_receptorRange));
		const int maxIndex = min(m_numReceptors - 1, floor((it->maxAngle - m_apertureMin) / m_receptorRange));

		// Now cycling over the computed receptors in the colorsInReceptors list to fill it
		for (int i = minIndex; i <= maxIndex; i++) {
			if (colorsInReceptors[i].colorsAndFractions.size() == 0) {
				// This is the first color in the receptor, we have to initialize the interval
				const double receptorMin = m_apertureMin + m_receptorRange * double(i);
				const double receptorMax = m_apertureMin + m_receptorRange * double(i + 1);
				colorsInReceptors[i].curInterval.initMultiInterval(receptorMin, receptorMax);
			}

			const double fraction = min(1.0, colorsInReceptors[i].curInterval.removeInterval(it->minAngle, it->maxAngle));
			colorsInReceptors[i].colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(it->color, fraction));
		}
	}

	// The final step is to compute the resulting color for each receptor. See the class
	// description for a comment on this procedure
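	// (colors are mixed proportionally to the fraction of the receptor they
	// cover: for example, pure red covering 60% of a receptor over a white
	// background covering the remaining 40% yields
	// fromRgbF(1.0*0.6 + 1.0*0.4, 0.4, 0.4) = (1.0, 0.4, 0.4), a washed-out red)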
	for (unsigned int i = 0; i < m_numReceptors; i++) {
		double red = 0.0;
		double green = 0.0;
		double blue = 0.0;
		for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorsInReceptors[i].colorsAndFractions.begin(); it != colorsInReceptors[i].colorsAndFractions.end(); ++it) {
			red += it->color.redF() * it->fraction;
			green += it->color.greenF() * it->fraction;
			blue += it->color.blueF() * it->fraction;
		}
		m_receptors[i] = QColor::fromRgbF(min(1.0, red), min(1.0, green), min(1.0, blue));
	}

	// Updating graphics if we have to
	if (m_drawCamera) {
		m_graphicalCamera->setPerceivedColors(m_receptors);
	}
}

void LinearCamera::drawCamera(bool d)
{
	if (m_drawCamera == d) {
		return;
	}

	m_drawCamera = d;
	if (m_drawCamera) {
		m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_apertureMin, m_apertureMax, m_numReceptors, "linearCamera");
	} else {
		// Resetting the pointer after deletion to avoid keeping a dangling reference
		delete m_graphicalCamera;
		m_graphicalCamera = NULL;
	}
}

void LinearCamera::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	if (resourceName == "arena") {
		switch (changeType) {
			case Created:
			case Modified:
				m_arena = getResource<Arena>();
				break;
			case Deleted:
				m_arena = NULL;
				break;
		}
	} else {
		Logger::info("Unknown resource " + resourceName + " (in LinearCamera)");
	}
}

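// The sampled IR data file format, as implied by the parsing code below (the
// concrete numbers are illustrative): a header line
//
//   <numIR> <numSamplingAngles> <numDistances> <initialDistance> <distanceInterval>
//
// with the two distances divided by 1000 when read (presumably a conversion
// from millimeters to meters); then, for each distance d in [0, numDistances),
// a "TURN d" line followed by numSamplingAngles rows of numIR integer
// activations in [0, 1023]; finally a line containing only "END".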
SampledIRDataLoader::SampledIRDataLoader(QString filename) :
	m_filename(filename),
	m_numIR(0),
	m_numSamplingAngles(0),
	m_numDistances(0),
	m_initialDistance(0.0f),
	m_distanceInterval(0.0f),
	m_finalDistance(0.0f),
	m_activations()
{
	// The maximum length of a line. This value is greater than needed, we use it just
	// to avoid problems with malformed files
	const int maxLineLength = 1024;

	// Opening the input file
	QFile file(m_filename);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), "Cannot open file for reading");
	}

	// Now opening a text stream on the file to read it
	QTextStream in(&file);

	// Reading the first line, the one with configuration parameters, and splitting it
	QStringList confs = in.readLine(maxLineLength).split(" ", QString::SkipEmptyParts);
	if (confs.size() != 5) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("Wrong format for the first line, expected 5 elements, got " + QString::number(confs.size())).toAscii().data());
	}

	// Now converting the elements of the configuration line
	bool ok;
	m_numIR = confs[0].toUInt(&ok);
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the first element of the first row: expected an unsigned integer, got \"" + confs[0] + "\"").toAscii().data());
	}
	m_numSamplingAngles = confs[1].toUInt(&ok);
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the second element of the first row: expected an unsigned integer, got \"" + confs[1] + "\"").toAscii().data());
	}
	m_numDistances = confs[2].toUInt(&ok);
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the third element of the first row: expected an unsigned integer, got \"" + confs[2] + "\"").toAscii().data());
	}
	m_initialDistance = confs[3].toFloat(&ok) / 1000.0;
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the fourth element of the first row: expected a real number, got \"" + confs[3] + "\"").toAscii().data());
	}
	m_distanceInterval = confs[4].toFloat(&ok) / 1000.0;
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the fifth element of the first row: expected a real number, got \"" + confs[4] + "\"").toAscii().data());
	}
	m_finalDistance = m_initialDistance + (m_numDistances - 1) * m_distanceInterval;

	// Resizing the vector of activations
	m_activations.resize(m_numIR * m_numSamplingAngles * m_numDistances);
	m_nullActivations.fill(0, m_numIR);

	// Now reading the blocks. We use the id after "TURN" for a safety check; the original
	// evorobot code used that in a "creative" way...
	int i = 0; // The index over the m_activations array
	for (unsigned int dist = 0; dist < m_numDistances; dist++) {
		QString turnLine = in.readLine(maxLineLength);
		QStringList turnLineSplitted = turnLine.split(" ", QString::SkipEmptyParts);

		// The line we just read should have been split in two. The first element should
		// be equal to "TURN", the second one to the current dist
		if ((turnLineSplitted.size() != 2) || (turnLineSplitted[0] != "TURN") || (turnLineSplitted[1].toUInt() != dist)) {
			throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid TURN line: \"" + turnLine + "\"").toAscii().data());
		}

		// Now reading the block for the current distance
		for (unsigned int ang = 0; ang < m_numSamplingAngles; ang++) {
			QString activationsLine = in.readLine(maxLineLength);
			QStringList activationsLineSplitted = activationsLine.split(" ", QString::SkipEmptyParts);

			// activationsLineSplitted should have m_numIR elements, all integers between 0 and 1023
			if (activationsLineSplitted.size() != int(m_numIR)) {
				throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid activations line (wrong number of elements, expected " + QString::number(m_numIR) + ", got " + QString::number(activationsLineSplitted.size()) + "): \"" + activationsLine + "\"").toAscii().data());
			}
			// Reading activations
			for (unsigned int id = 0; id < m_numIR; id++) {
				bool ok;
				const unsigned int act = activationsLineSplitted[id].toUInt(&ok);
				if ((!ok) || (act > 1023)) {
					throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid activations line (invalid activation value): \"" + activationsLineSplitted[id] + "\"").toAscii().data());
				}
				m_activations[i++] = act;
			}
		}
	}

	// The final row in the file should be "END"
	QString finalLine = in.readLine(maxLineLength);
	if (finalLine != "END") {
		throw SampleFileLoadingException(m_filename.toAscii().data(), ("The last line in the file should be \"END\", actual value: \"" + finalLine + "\"").toAscii().data());
	}
}

SampledIRDataLoader::~SampledIRDataLoader()
{
	// Nothing to do here
}

unsigned int SampledIRDataLoader::getActivation(unsigned int i, real dist, real ang) const
{
	// Using the other version of the getActivation function
	QVector<unsigned int>::const_iterator it = getActivation(dist, ang);

	return *(it + i);
}

QVector<unsigned int>::const_iterator SampledIRDataLoader::getActivation(real dist, real ang) const
{
	const real distIndex = (dist - m_initialDistance) / m_distanceInterval;
	const unsigned int d = (distIndex < 0.0) ? 0 : (unsigned int) distIndex;

	// If we are over the maximum distance, returning all zeros
	if (d >= m_numDistances) {
		return m_nullActivations.begin();
	}

	// We first have to restrict the angle between 0.0 and 2*PI, then we can compute the index
	const real normAng = ang - (floor(ang / (2.0 * PI_GRECO)) * 2.0 * PI_GRECO);
	const real angIndex = (normAng / (2.0 * PI_GRECO)) * real(m_numSamplingAngles);
	const unsigned int a = (angIndex < 0.0) ? 0 : (unsigned int) angIndex;

	return m_activations.begin() + getLinearIndex(0, a, d);
}

unsigned int SampledIRDataLoader::getLinearIndex(unsigned int id, unsigned int ang, unsigned int dist) const
{
	return (dist * m_numSamplingAngles + ang) * m_numIR + id;
}
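
// Example of the layout computed by getLinearIndex(): activations are stored
// with the IR index varying fastest, then the sampling angle, then the
// distance. With m_numIR = 8 and m_numSamplingAngles = 180 (illustrative
// values), the entry for id = 2, ang = 3, dist = 1 sits at
// (1 * 180 + 3) * 8 + 2 = 1466. getIndexes() below is the inverse mapping.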

void SampledIRDataLoader::getIndexes(unsigned int i, unsigned int &id, unsigned int &ang, unsigned int &dist) const
{
	id = i % m_numIR;
	ang = (i / m_numIR) % m_numSamplingAngles;
	dist = i / (m_numIR * m_numSamplingAngles);
}

} // end namespace farsa