sensors.cpp
1 /********************************************************************************
2  * FARSA Experiments Library *
3  * Copyright (C) 2007-2012 *
4  * Gianluca Massera <emmegian@yahoo.it> *
5  * Stefano Nolfi <stefano.nolfi@istc.cnr.it> *
6  * Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it> *
7  * Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it> *
8  * *
9  * This program is free software; you can redistribute it and/or modify *
10  * it under the terms of the GNU General Public License as published by *
11  * the Free Software Foundation; either version 2 of the License, or *
12  * (at your option) any later version. *
13  * *
14  * This program is distributed in the hope that it will be useful, *
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
17  * GNU General Public License for more details. *
18  * *
19  * You should have received a copy of the GNU General Public License *
20  * along with this program; if not, write to the Free Software *
21  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *
22  ********************************************************************************/
23 
#include "sensors.h"
#include "configurationhelper.h"
#include "motorcontrollers.h"
#include "logger.h"
#include "graphicalwobject.h"
#include "arena.h"
#include <QStringList>
#include <QList>
#include <QLinkedList>
#include <QFile>
#include <QTextStream>
#include <QtAlgorithms>
#include <limits>
#include <cmath>
#include <list>
38 
39 namespace farsa {
40 
42  Sensor(params, prefix),
43  m_additionalInputs(ConfigurationHelper::getUnsignedInt(params, prefix + "additionalInputs", 1)),
44  m_neuronsIteratorResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator"))),
45  m_additionalInputsResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "additionalInputsResource", "additionalInputs"))),
46  m_neuronsIterator(NULL)
47 {
49 
50  for (unsigned int i = 0; i < m_additionalInputs.size(); i++) {
51  m_additionalInputs[i] = 0.0;
52  }
53 }
54 
56 {
57  // Removing resources
58  try {
60  } catch (...) {
61  // Doing nothing, this is here just to prevent throwing an exception from the destructor
62  }
63 }
64 
65 void FakeSensor::save(ConfigurationParameters& params, QString prefix)
66 {
67  Sensor::save( params, prefix );
68  params.startObjectParameters(prefix, "FakeSensor", this);
69  params.createParameter(prefix, "additionalInputs", QString::number(m_additionalInputs.size()));
70  params.createParameter(prefix, "neuronsIterator", m_neuronsIteratorResource);
71  params.createParameter(prefix, "additionalInputsResource", m_additionalInputsResource);
72 }
73 
74 void FakeSensor::describe(QString type)
75 {
76  Sensor::describe(type);
77 
78  Descriptor d = addTypeDescription(type, "Adds input neurons that can be used for custom operations", "With this sensor you can specify how many additional inputs are needed in the controller. This also declares a resource that can be used to access the additional inputs");
79  d.describeInt("additionalInputs").def(1).limits(1,100).props(IsMandatory).help("The number of additional inputs that will be added to the controller (default 1)");
80  d.describeString("neuronsIterator").def("neuronsIterator").help("The name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
81  d.describeString("additionalInputsResource").def("additionalInputs").help("The name of the resource associated with the vector of additional inputs (default is \"additionalInputs\")");
82 }
83 
85 {
86  // Checking all resources we need exist
88 
89  ResourcesLocker locker(this);
90 
91  // Copying the output inside the vector of additional outputs
93  for (unsigned int i = 0; i < m_additionalInputs.size(); i++, m_neuronsIterator->nextNeuron()) {
95  }
96 }
97 
99 {
100  return m_additionalInputs.size();
101 }
102 
104 {
105  // Calling parent function
107 
108  // Now declaring our resource
110 }
111 
// Reacts to changes of the resources this sensor declared as usable.
// Re-caches the neurons iterator and labels our input neurons when the
// iterator resource (re)appears.
void FakeSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	// Nothing to rebind when a resource disappears.
	// NOTE(review): m_neuronsIterator is not reset to NULL here in this copy
	// of the file — a line may have been lost; confirm against upstream
	if (changeType == Deleted) {
		return;
	}

	if (resourceName == m_neuronsIteratorResource) {
		// Cache the iterator and give each of our inputs a label ("Fk0",
		// "Fk1", ...) with a graphic activation range of [0, 1]
		m_neuronsIterator = getResource<NeuronsIterator>();
		// NOTE(review): other sensors in this file call setCurrentBlock(name())
		// before iterating over neurons; that call is absent here — verify
		// whether a line was lost in this copy
		for(int i = 0; i < size(); i++, m_neuronsIterator->nextNeuron()) {
			m_neuronsIterator->setGraphicProperties("Fk" + QString::number(i), 0.0, 1.0, Qt::red);
		}
	} else if (resourceName != m_additionalInputsResource) {
		// m_additionalInputsResource is declared by this very sensor, so it is
		// expected; anything else is unknown and only logged
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
129 
130 //ObjectPositionSensor : begin implementation
// it returns the absolute coordinates of an object in the world
133  Sensor(params, prefix) {
134  neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator");
135  objectName = ConfigurationHelper::getString( params, prefix+"object", "object" );
136  QVector<double> vec1 = ConfigurationHelper::getVector( params, prefix+"bbMin" );
137  QVector<double> vec2 = ConfigurationHelper::getVector( params, prefix+"bbMax" );
138  if ( vec1.size() == 3 && vec2.size() == 3 ) {
139  linearize = true;
140  bbMin = wVector( vec1[0], vec1[1], vec1[2] );
141  bbMax = wVector( vec2[0], vec2[1], vec2[2] );
142  } else {
143  linearize = false;
144  if ( ! (vec1.isEmpty() && vec2.isEmpty()) ) {
145  Logger::warning( QString("ObjectPositionSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
146  }
147  }
148 
149  // Declaring the resources that are needed here
150  usableResources( QStringList() << objectName << neuronsIteratorResource );
151 }
152 
154  // nothing to do
155 }
156 
157 void ObjectPositionSensor::describe( QString type ) {
158  Sensor::describe( type );
159  Descriptor d = addTypeDescription( type, "Sensor for reading the three absolute coordinate (position into the worlf frame) of an object" );
160  d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
161  d.describeString( "object" ).def( "object" ).props( IsMandatory ).help( "The name of the resource associated with the object to track with this sensor" );
162  d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used for linearize the object position into [0,1]" );
163  d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used for linearize the object position into [0,1]" );
164 }
165 
167  // Checking all resources we need exist
169 
170  // Acquiring the lock to get resources
171  ResourcesLocker locker( this );
172 
173  WObject* object = getResource<WObject>( objectName );
174  wVector pos = object->matrix().w_pos;
175  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
176  evonetIt->setCurrentBlock( name() );
177  for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
178  if ( linearize ) {
179  // linearize into [0,1]
180  evonetIt->setInput( linearMap( pos[i], bbMin[i], bbMax[i], 0, 1 ) );
181  } else {
182  evonetIt->setInput( pos[i] );
183  }
184  }
185 }
186 
188  return 3;
189 }
190 
// Reacts to changes of the resources this sensor uses (the tracked object
// and the neurons iterator).
void ObjectPositionSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	// Nothing to unbind: both resources are re-acquired with getResource()
	// when needed.
	// NOTE(review): one line appears to be missing inside this branch in this
	// copy of the file — confirm against upstream
	if (changeType == Deleted) {
		return;
	}

	if (resourceName == objectName) {
		// Nothing to do here, we get the object with getResource() in update()
	} else if (resourceName == neuronsIteratorResource) {
		// Label the three input neurons ("obj0".."obj2") with a graphic
		// range of [-10, 10]
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
			evonetIt->setGraphicProperties( QString("obj")+QString::number(i), -10.0, 10.0, Qt::red );
		}
	} else {
		// Unexpected resource: only logged, not an error
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
209 
211 {
212  Sensor::save( params, prefix );
213  params.startObjectParameters( prefix, "ObjectPositionSensor", this );
214  params.createParameter(prefix, "neuronsIterator", neuronsIteratorResource);
215  params.createParameter( prefix, "object", objectName );
216  if ( linearize ) {
217  params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
218  params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
219  }
220 }
221 //ObjectPositionSensor : end implementation
222 
223 namespace __LinearCamera_internal {
224  #ifndef GLMultMatrix
225  #define GLMultMatrix glMultMatrixf
226  // for double glMultMatrixd
227  #endif
228 
232  const float linearCameraCubeSide = 0.02f;
233 
238  const float linearCameraReceptorsLength = 0.1f;
239 
244  {
245  public:
259  LinearCameraGraphic(WObject *object, const wMatrix& transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name = "unamed") :
260  GraphicalWObject(object->world(), name),
261  m_object(object),
262  m_minAngle(minAngle),
263  m_maxAngle(maxAngle),
264  m_numReceptors(numReceptors),
266  m_receptors(m_numReceptors, Qt::black)
267  {
268  // Attaching to object (which also becomes our owner)
269  attachToObject(m_object, true, transformation);
270 
271  // We also use our own color and texture
273  setTexture("");
274  setColor(Qt::white);
275  }
276 
281  {
282  }
283 
290  void setPerceivedColors(const QVector<QColor>& receptors)
291  {
292  m_receptorsMutex.lock();
293  m_receptors = receptors;
294  m_receptorsMutex.unlock();
295  }
296 
297  protected:
305  virtual void render(RenderWObject* renderer, QGLContext* gw)
306  {
307  // Bringing the frame of reference at the center of the camera
308  glPushMatrix();
309  renderer->container()->setupColorTexture(gw, renderer);
310  GLMultMatrix(&tm[0][0]);
311 
312  // First of all drawing the camera as a small white box. The face in the
313  // direction of view (X axis) is painted half green: the green part is the
314  // one in the direction of the upvector (Z axis)
315  glBegin(GL_QUADS);
316  const float hside = linearCameraCubeSide / 2.0;
317 
318  // front (top part)
319  glColor3f(0.0, 1.0, 0.0);
320  glNormal3f(1.0, 0.0, 0.0);
321  glVertex3f( hside, -hside, hside);
322  glVertex3f( hside, -hside, 0.0);
323  glVertex3f( hside, hside, 0.0);
324  glVertex3f( hside, hside, hside);
325 
326  // front (bottom part)
327  glColor3f(1.0, 1.0, 1.0);
328  glNormal3f(1.0, 0.0, 0.0);
329  glVertex3f( hside, -hside, 0.0);
330  glVertex3f( hside, -hside, -hside);
331  glVertex3f( hside, hside, -hside);
332  glVertex3f( hside, hside, 0.0);
333 
334  // back
335  glNormal3f(-1.0, 0.0, 0.0);
336  glVertex3f(-hside, -hside, -hside);
337  glVertex3f(-hside, -hside, hside);
338  glVertex3f(-hside, hside, hside);
339  glVertex3f(-hside, hside, -hside);
340 
341  // top
342  glNormal3f(0.0, 1.0, 0.0);
343  glVertex3f(-hside, hside, hside);
344  glVertex3f( hside, hside, hside);
345  glVertex3f( hside, hside, -hside);
346  glVertex3f(-hside, hside, -hside);
347 
348  // bottom
349  glNormal3f(0.0, -1.0, 0.0);
350  glVertex3f(-hside, -hside, -hside);
351  glVertex3f( hside, -hside, -hside);
352  glVertex3f( hside, -hside, hside);
353  glVertex3f(-hside, -hside, hside);
354 
355  // right
356  glNormal3f(0.0, 0.0, 1.0);
357  glVertex3f( hside, -hside, hside);
358  glVertex3f(-hside, -hside, hside);
359  glVertex3f(-hside, hside, hside);
360  glVertex3f( hside, hside, hside);
361 
362  // left
363  glNormal3f(0.0, 0.0, -1.0);
364  glVertex3f( hside, -hside, -hside);
365  glVertex3f(-hside, -hside, -hside);
366  glVertex3f(-hside, hside, -hside);
367  glVertex3f( hside, hside, -hside);
368 
369  glEnd();
370 
			// Now we draw white lines to separate the various sectors of the camera
372  // Disabling lighting here (we want pure lines no matter from where we look at them)
373  glPushAttrib(GL_LIGHTING_BIT);
374  glDisable(GL_LIGHTING);
375  glLineWidth(2.5);
376  glColor3f(1.0, 1.0, 1.0);
377 
378  // Drawing the lines
379  glBegin(GL_LINES);
380  for (unsigned int i = 0; i <= m_numReceptors; i++) {
381  const double curAngle = m_minAngle + double(i) * m_receptorRange;
382 
383  const wVector lineEnd = wVector(cos(curAngle), sin(curAngle), 0.0).scale(linearCameraReceptorsLength);
384 
385  glVertex3f(0.0, 0.0, 0.0);
386  glVertex3f(lineEnd.x, lineEnd.y, lineEnd.z);
387  }
388  glEnd();
389 
390  // Now drawing the state of receptors. Here we also have to lock the semaphore for
391  // the m_receptors vector
392  m_receptorsMutex.lock();
393 
394  // Drawing the status
395  glBegin(GL_QUADS);
396  glNormal3f(0.0, 1.0, 0.0);
397  const double colorPatchAngle = m_receptorRange / 3.0;
398  const double colorPatchMinLength = linearCameraReceptorsLength / 3.0;
399  const double colorPatchMaxLength = 2.0 * linearCameraReceptorsLength / 3.0;
400  for (unsigned int i = 0; i < m_numReceptors; i++) {
401  const double curAngle = m_minAngle + double(i) * m_receptorRange;
402 
403  for (unsigned int c = 0; c < 3; c++) {
404  const double startAngle = curAngle + double(c) * colorPatchAngle;
405  const double endAngle = curAngle + double(c + 1) * colorPatchAngle;
406 
407  // Computing the four vertexes
408  const wVector v1 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMinLength);
409  const wVector v2 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMaxLength);
410  const wVector v3 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMaxLength);
411  const wVector v4 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMinLength);
412 
413  // Setting the color
414  switch (c) {
415  case 0:
416  glColor3f(m_receptors[i].redF(), 0.0, 0.0);
417  break;
418  case 1:
419  glColor3f(0.0, m_receptors[i].greenF(), 0.0);
420  break;
421  case 2:
422  glColor3f(0.0, 0.0, m_receptors[i].blueF());
423  break;
424  default:
425  break;
426  }
427 
428  // Drawing the patch
429  glVertex3f(v1.x, v1.y, v1.z);
430  glVertex3f(v2.x, v2.y, v2.z);
431  glVertex3f(v3.x, v3.y, v3.z);
432  glVertex3f(v4.x, v4.y, v4.z);
433  }
434  }
435  glEnd();
436  m_receptorsMutex.unlock();
437 
438  // Restoring lighting status
439  glPopAttrib();
440 
441  glPopMatrix();
442  }
443 
448 
452  const double m_minAngle;
453 
457  const double m_maxAngle;
458 
462  const unsigned int m_numReceptors;
463 
469  const double m_receptorRange;
470 
474  QVector<QColor> m_receptors;
475 
483  };
484 }
485 
486 using namespace __LinearCamera_internal;
487 
// Builds a linear camera attached to obj, displaced by mtr with respect to
// the object frame. The aperture is expressed in radians and divided evenly
// among numReceptors receptors; backgroundColor is what receptors perceive
// when no object is in sight.
// NOTE(review): an initializer-list line may be missing between the signature
// and m_receptors in this copy of the file — confirm against upstream.
LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, QColor backgroundColor) :
	m_receptors(numReceptors),
	m_object(obj),
	m_transformation(mtr),
	// The aperture is clamped into [0, 2*PI]
	m_aperture((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture)),
	m_numReceptors(numReceptors),
	m_backgroundColor(backgroundColor),
	// The view field is symmetric around the camera view direction
	m_apertureMin(-m_aperture / 2.0),
	m_apertureMax(m_aperture / 2.0),
	// The angular width of a single receptor
	m_receptorRange(m_aperture / double(m_numReceptors)),
	m_arena(NULL),
	m_drawCamera(false),
	m_graphicalCamera(NULL)

{
	// Stating which resources we use here
	addUsableResource("arena");
}
507 
509 {
510  // Nothing to do here
511 }
512 
513 namespace {
514  // This namespace contains some structures used in the LinearCamera::update() function
515 
516  // The structure containing a color and the range of the camera field hit by this color.
517  // It also contains the distance from the camera for ordering.
	// A perceived color together with the angular range of the camera view
	// field it covers and its distance from the camera (used for ordering).
	struct ColorRangeAndDistance
	{
		ColorRangeAndDistance() :
			color(),
			minAngle(0.0),
			maxAngle(0.0),
			distance(0.0)
		{
		}

		ColorRangeAndDistance(QColor c, double min, double max, double d) :
			color(c),
			minAngle(min),
			maxAngle(max),
			distance(d)
		{
		}

		// This is to order objects of this type (by increasing distance)
		bool operator<(const ColorRangeAndDistance& other) const
		{
			return (distance < other.distance);
		}

		// The color of the perceived object
		QColor color;
		// The portion [minAngle, maxAngle] of the view field covered by the object
		double minAngle;
		double maxAngle;
		// The distance from the camera
		double distance;
	};
547 
548  // An helper class to ease computations with multiple intervals. This class starts with a single
549  // interval and the allows to remove portions. When removing an interval, returns the portion
550  // of the initial range that was actually removed
551  class MultiInterval
552  {
553  private:
554  struct SingleInterval
555  {
556  double start;
557  double end;
558  };
559 
560  public:
561  MultiInterval() :
562  m_originalSize(0.0),
563  m_intervals()
564  {
565  }
566 
567  void initMultiInterval(double start, double end)
568  {
569  m_originalSize = end - start;
570 
571  SingleInterval i;
572  i.start = start;
573  i.end = end;
574  m_intervals.append(i);
575  }
576 
577  double removeInterval(double start, double end)
578  {
579  double removedSize = 0.0;
580 
581  // We exit from the cycle when both these variables are true: intervals are ordered so,
582  // if we have found both the interval for start and the one for end we can exit
583  bool foundStartInIntervals = false;
584  bool foundEndInIntervals = false;
585  QLinkedList<SingleInterval>::iterator it = m_intervals.begin();
586  while ((it != m_intervals.end()) && (!foundStartInIntervals || !foundEndInIntervals)) {
587  if ((start <= it->start) && (end >= it->end)) {
588  // Removing the whole interval and continuing
589  removedSize += it->end - it->start;
590  it = m_intervals.erase(it);
591  } else if ((start >= it->start) && (start < it->end) && (end > it->start) && (end <= it->end)) {
592  // Here we have to split the interval in two. We put the two new intervals in place
593  // of the old one
594  removedSize += end - start;
595  SingleInterval i1, i2;
596  i1.start = it->start;
597  i1.end = start;
598  i2.start = end;
599  i2.end = it->end;
600  it = m_intervals.erase(it);
601  // Going one step back to insert the two new items
602  --it;
603  it = m_intervals.insert(it, i1);
604  it = m_intervals.insert(it, i2);
605 
606  // This interval was completely inside another interval, so no other interval will
607  // be intersected and we can exit from the cycle
608  foundStartInIntervals = true;
609  foundEndInIntervals = true;
610  } else if ((start > it->start) && (start < it->end)) {
611  // Here we have to reduce the interval by setting the new end
612  removedSize += it->end - start;
613  it->end = start;
614  foundStartInIntervals = true;
615  ++it;
616  } else if ((end > it->start) && (end < it->end)) {
617  // Here we have to reduce the interval, by setting the new start
618  removedSize += end - it->start;
619  it->start = end;
620  foundEndInIntervals = true;
621  ++it;
622  } else {
623  // Simply incrementing the iterator
624  ++it;
625  }
626  }
627 
628  return removedSize / m_originalSize;
629  }
630 
631  private:
632  double m_originalSize;
633  // Intervals will always be ordered from the one with the lowest start to the one with the highest start.
634  // Moreover two intervals will never intersect
635  QLinkedList<SingleInterval> m_intervals;
636  };
637 
638  // An helper structure memorizing information about colors in a single receptor. minAngle and maxAngle
639  // are used to store the current portion of the receptor for which we already know the color, while
640  // colorsAndFractions is the list of colors and the portion of the receptor occupied by that color
	struct ColorsInReceptor
	{
		// The portions of the receptor arc whose color is not yet decided;
		// colors are processed nearest-first, so each color consumes part of it
		MultiInterval curInterval;

		// A color and the fraction of the receptor arc it covers
		struct ColorAndFraction {
			ColorAndFraction() :
				color(),
				fraction(0.0)
			{
			}

			ColorAndFraction(QColor c, double f) :
				color(c),
				fraction(f)
			{
			}

			// The perceived color
			QColor color;
			// The fraction of the receptor covered by this color, in [0, 1]
			double fraction;
		};
		// The list of colors seen by this receptor with their fractions
		QList<ColorAndFraction> colorsAndFractions;
	};
663 }
664 
666 {
667 #ifdef __GNUC__
668  #warning APPENA I ROBOT SONO NELLA LISTA DEGLI OGGETTI, BISOGNA RICORDARSI DI ESCLUDERE L OGGETTO CUI LA CAMERA È ATTACCATA QUANDO SI CALCOLA L ATTIVAZIONE
669 #endif
670  // Getting the list of objects from the arena (if we have the pointer to the arena)
671  if (m_arena == NULL) {
672  m_receptors.fill(m_backgroundColor);
673 
674  return;
675  }
676  const QVector<PhyObject2DWrapper*>& objectsList = m_arena->getObjects();
677 
678  // If no object is present, we can fill the receptors list with background colors and return
679  if (objectsList.size() == 0) {
680  m_receptors.fill(m_backgroundColor);
681 
682  return;
683  }
684 
685  // Updating the matrix with the current camera position
686  wMatrix currentMtr = m_transformation * m_object->matrix();
687 
688  // First of all we need to compute which color hits each receptor
689 
690  // Now filling the list with colors, ranges and distances. If an object is perceived at the
691  // extremities of the aperture, it is split in two different ColorRangeAndDistance objects
692  QList<ColorRangeAndDistance> colorsRangesAndDistances;
693 
694  // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
695  // this is not correct (occlusion doesn't work well) and so should be changed
696  for (int i = 0; i < objectsList.size(); i++) {
697  const QColor color = objectsList[i]->color();
698  double minAngle;
699  double maxAngle;
700  double distance;
701  objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, minAngle, maxAngle, distance);
702 
703  // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
704  if (distance < 0.0) {
705  continue;
706  }
707 
708  // If the minAngle is greater than the maxAngle, splitting in two, so that we do not have to
709  // make special cases in the subsequent part of the function. Here we also check if the object
710  // is completely outside the view field or not (in the first case we don't add it to the list)
711  // We just check if the object is at least partially visible, we don't set the limits to be
712  // within the view field
713  if (minAngle > maxAngle) {
714  if ((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) {
715  colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, m_apertureMax, distance));
716  }
717  if ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax)) {
718  colorsRangesAndDistances.append(ColorRangeAndDistance(color, m_apertureMin, maxAngle, distance));
719  }
720  } else {
721  if (((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) || ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax))) {
722  colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, maxAngle, distance));
723  }
724  }
725  }
726 
727  // Ordering colors by distance from the camera
728  qSort(colorsRangesAndDistances);
729 
730  // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
731  // the whole field with a valid color
732  colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, m_apertureMin, m_apertureMax, std::numeric_limits<double>::infinity()));
733 
734  // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
735  // in each receptor
736  QVector<ColorsInReceptor> colorsInReceptors(m_numReceptors);
737  for (QList<ColorRangeAndDistance>::const_iterator it = colorsRangesAndDistances.begin(); it != colorsRangesAndDistances.end(); ++it) {
738  // Computing the index of receptors which are interested by this color
739  const int minIndex = max(0, floor((it->minAngle - m_apertureMin) / m_receptorRange));
740  const int maxIndex = min(m_numReceptors - 1, floor((it->maxAngle - m_apertureMin) / m_receptorRange));
741 
742  // Now cycling over the computed receptors in the colorsInReceptors list to fill it
743  for (int i = minIndex; i <= maxIndex; i++) {
744  if (colorsInReceptors[i].colorsAndFractions.size() == 0) {
745  // This is the first color in the receptor, we have to initialize the interval
746  const double receptorMin = m_apertureMin + m_receptorRange * double(i);
747  const double receptorMax = m_apertureMin + m_receptorRange * double(i + 1);
748  colorsInReceptors[i].curInterval.initMultiInterval(receptorMin, receptorMax);
749  }
750 
751  const double fraction = min(1.0, colorsInReceptors[i].curInterval.removeInterval(it->minAngle, it->maxAngle));
752  colorsInReceptors[i].colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(it->color, fraction));
753  }
754  }
755 
756  // The final step is to compute the resulting color for each receptor. See class description for a comment
757  // on this procedure
758  for (unsigned int i = 0; i < m_numReceptors; i++) {
759  double red = 0.0;
760  double green = 0.0;
761  double blue = 0.0;
762  for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorsInReceptors[i].colorsAndFractions.begin(); it != colorsInReceptors[i].colorsAndFractions.end(); ++it) {
763  red += it->color.redF() * it->fraction;
764  green += it->color.greenF() * it->fraction;
765  blue += it->color.blueF() * it->fraction;
766  }
767  m_receptors[i] = QColor::fromRgbF(min(1.0, red), min(1.0, green), min(1.0, blue));
768  }
769 
770  // Updating graphics if we have to
771  if (m_drawCamera) {
772  m_graphicalCamera->setPerceivedColors(m_receptors);
773  }
774 }
775 
777 {
778  if (m_drawCamera == d) {
779  return;
780  }
781 
782  m_drawCamera = d;
783  if (m_drawCamera) {
784  m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_apertureMin, m_apertureMax, m_numReceptors, "linearCamera");
785  } else {
786  delete m_graphicalCamera;
787  }
788 }
789 
790 void LinearCamera::resourceChanged(QString resourceName, ResourceChangeType changeType)
791 {
792  if (resourceName == "arena") {
793  switch (changeType) {
794  case Created:
795  case Modified:
796  m_arena = getResource<Arena>();
797  break;
798  case Deleted:
799  m_arena = NULL;
800  break;
801  }
802  } else {
803  Logger::info("Unknown resource " + resourceName + " (in LinearCamera)");
804  }
805 }
806 
808  m_filename(filename),
809  m_numIR(0),
810  m_numSamplingAngles(0),
811  m_numDistances(0),
812  m_initialDistance(0.0f),
813  m_distanceInterval(0.0f),
814  m_finalDistance(0.0f),
815  m_activations(),
816  m_nullActivations()
817 {
	// The maximum length of a line. This value is greater than needed, we use it just
	// to avoid problems with malformed files
820  const int maxLineLength = 1024;
821 
822  // Opening the input file
823  QFile file(m_filename);
824  if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
825  throw SampleFileLoadingException(m_filename.toAscii().data(), "Cannot open file for reading");
826  }
827 
828  // Now opening a text stream on the file to read it
829  QTextStream in(&file);
830 
831  // Reading the first line, the one with configuration parameters and splitting it
832  QStringList confs = in.readLine(maxLineLength).split(" ", QString::SkipEmptyParts);
833  if (confs.size() != 5) {
834  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Wrong format for the first line, expected 5 elements, got " + QString::number(confs.size())).toAscii().data());
835  }
836 
837  // Now converting the elements of the configuration line
838  bool ok;
839  m_numIR = confs[0].toUInt(&ok);
840  if (!ok) {
841  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the first element of the first row: expected an unsigned integer, got \"" + confs[0] + "\"").toAscii().data());
842  }
843  m_numSamplingAngles = confs[1].toUInt(&ok);
844  if (!ok) {
845  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the second element of the first row: expected an unsigned integer, got \"" + confs[1] + "\"").toAscii().data());
846  }
847  m_numDistances = confs[2].toUInt(&ok);
848  if (!ok) {
849  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the third element of the first row: expected an unsigned integer, got \"" + confs[2] + "\"").toAscii().data());
850  }
851  m_initialDistance = confs[3].toFloat(&ok) / 1000.0;
852  if (!ok) {
853  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the fourth element of the first row: expected a real number, got \"" + confs[3] + "\"").toAscii().data());
854  }
855  m_distanceInterval = confs[4].toFloat(&ok) / 1000.0;
856  if (!ok) {
857  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the fifth element of the first row: expected a real number, got \"" + confs[4] + "\"").toAscii().data());
858  }
859  m_finalDistance = m_initialDistance + (m_numDistances - 1) * m_distanceInterval;
860 
861  // Resizing the vector of activations
862  m_activations.resize(m_numIR * m_numSamplingAngles * m_numDistances);
863  m_nullActivations.fill(0, m_numIR);
864 
865  // Now reading the blocks. I use the id after "TURN" for a safety check, the original evorobot code used that
866  // in a "creative" way...
867  int i = 0; // The index over the m_activations array
868  for (unsigned int dist = 0; dist < m_numDistances; dist++) {
869  QString turnLine = in.readLine(maxLineLength);
870  QStringList turnLineSplitted = turnLine.split(" ", QString::SkipEmptyParts);
871 
872  // The line we just read should have been split in two. The first element should
873  // be equal to "TURN", the second one to the current dist
874  if ((turnLineSplitted.size() != 2) || (turnLineSplitted[0] != "TURN") || (turnLineSplitted[1].toUInt() != dist)) {
875  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid TURN line: \"" + turnLine + "\"").toAscii().data());
876  }
877 
878  // Now reading the block for the current distance
879  for (unsigned int ang = 0; ang < m_numSamplingAngles; ang++) {
880  QString activationsLine = in.readLine(maxLineLength);
881  QStringList activationsLineSplitted = activationsLine.split(" ", QString::SkipEmptyParts);
882 
883  // activationsLineSplitted should have m_numIR elements, all integers between 0 and 1023
884  if (activationsLineSplitted.size() != m_numIR) {
885  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid activations line (wrong number of elements, expected " + QString::number(m_numIR) + ", got " + QString::number(activationsLineSplitted.size()) + "): \"" + activationsLine + "\"").toAscii().data());
886  }
887  // Reading activations
888  for (unsigned int id = 0; id < m_numIR; id++) {
889  bool ok;
890  const unsigned int act = activationsLineSplitted[id].toUInt(&ok);
891  if ((!ok) || (act > 1023)) {
892  throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid activations line (invalid activation value): \"" + activationsLineSplitted[id] + "\"").toAscii().data());
893  }
894  m_activations[i++] = act;
895  }
896  }
897  }
898 
899  // The final row in the file should be "END"
900  QString finalLine = in.readLine(maxLineLength);
901  if (finalLine != "END") {
902  throw SampleFileLoadingException(m_filename.toAscii().data(), ("The last line in the file should be \"END\", actual value: \"" + finalLine + "\"").toAscii().data());
903  }
904 }
905 
907 {
908  // Nothing to do here
909 }
910 
911 unsigned int SampledIRDataLoader::getActivation(unsigned int i, real dist, real ang) const
912 {
913  // Using the other version of the getActivation function
914  QVector<unsigned int>::const_iterator it = getActivation(dist, ang);
915 
916  return *(it + i);
917 }
918 
919 QVector<unsigned int>::const_iterator SampledIRDataLoader::getActivation(real dist, real ang) const
920 {
921  const real distIndex = (dist - m_initialDistance) / m_distanceInterval;
922  const unsigned int d = (distIndex < 0.0) ? 0 : (unsigned int) distIndex;
923 
924  // If we are over the maximum distance, returning all zeros
925  if (d >= m_numDistances) {
926  return m_nullActivations.begin();
927  }
928 
929  // We first have to restrict the angle between 0.0 and 2*PI, then we can compute the index.
930  const real normAng = ang - (floor(ang / (2.0 * PI_GRECO)) * 2.0 * PI_GRECO);
931  const real angIndex = (normAng / (2.0 * PI_GRECO)) * real(m_numSamplingAngles);
932  unsigned int a = (angIndex < 0.0) ? 0 : (unsigned int) angIndex;
933  if (a >= m_numSamplingAngles) {
934  a = m_numSamplingAngles - 1;
935  }
936 
937  return m_activations.begin() + getLinearIndex(0, a, d);
938 }
939 
940 unsigned int SampledIRDataLoader::getLinearIndex(unsigned int id, unsigned int ang, unsigned int dist) const
941 {
942  // Inverting ang, positive angles in the file are clockwise angles
943  ang = m_numSamplingAngles - ang - 1;
944  return (dist * m_numSamplingAngles + ang) * m_numIR + id;
945 }
946 
947 } // end namespace farsa