icubsensors.cpp
1 /********************************************************************************
2  * FARSA Experiments Library *
3  * Copyright (C) 2007-2012 *
4  * Gianluca Massera <emmegian@yahoo.it> *
5  * Stefano Nolfi <stefano.nolfi@istc.cnr.it> *
6  * Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it> *
7  * Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it> *
8  * *
9  * This program is free software; you can redistribute it and/or modify *
10  * it under the terms of the GNU General Public License as published by *
11  * the Free Software Foundation; either version 2 of the License, or *
12  * (at your option) any later version. *
13  * *
14  * This program is distributed in the hope that it will be useful, *
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
17  * GNU General Public License for more details. *
18  * *
19  * You should have received a copy of the GNU General Public License *
20  * along with this program; if not, write to the Free Software *
21  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *
22  ********************************************************************************/
23 
24 #ifdef FARSA_USE_YARP_AND_ICUB
25 
26 #include "icubsensors.h"
27 #include "configurationhelper.h"
28 #include "motorcontrollers.h"
29 #include "logger.h"
30 #include "graphicalwobject.h"
31 #include <QStringList>
32 
33 namespace farsa {
34 
36  Sensor(params, prefix),
37  icubResource("robot"),
38  neuronsIteratorResource("neuronsIterator")
39 {
40  // Reading parameters
41  icubResource = ConfigurationHelper::getString(params, prefix + "icub", icubResource);
43 
44  // Declaring the resources that are needed here
46 }
47 
49 {
50  // Nothing to do here
51 }
52 
53 void iCubSensor::save(ConfigurationParameters& params, QString prefix)
54 {
55  // Calling parent function
56  Sensor::save(params, prefix);
57 
58  // Saving parameters
59  params.startObjectParameters(prefix, "iCubSensor", this);
60  params.createParameter(prefix, "icub", icubResource);
61  params.createParameter(prefix, "neuronsIterator", neuronsIteratorResource);
62 }
63 
64 void iCubSensor::describe(QString type)
65 {
66  // Calling parent function
67  Sensor::describe(type);
68 
69  // Describing our parameters
70  Descriptor d = addTypeDescription(type, "The base class for iCub sensors");
71  d.describeString("icub").def("robot").help("the name of the resource associated with the iCub robot to use (default is \"robot\")");
72  d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
73 }
74 
// Reacts to changes in the resources this sensor declared as needed.
// The base implementation only filters out deletions; subclasses do the
// actual (re)binding of controllers/iterators in their own overrides.
void iCubSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
    // Calling parent function
    Sensor::resourceChanged(resourceName, changeType);

    // Here we only check whether the resource has been deleted and reset the check flag, the
    // actual work is done in subclasses
    // NOTE(review): the comment above mentions resetting a check flag, but no
    // reset call is visible in this branch — a statement may have been lost in
    // this copy of the file; confirm against the upstream FARSA sources.
    if (changeType == Deleted) {
        return;
    }
}
87 
89  iCubSensor(params, prefix),
90  icubMotors(NULL) {
91  icubArm = ConfigurationHelper::getString( params, prefix+"arm", "right" );
92  // Declaring the resources that are needed here
94 }
95 
97  /* nothing to do */
98 }
99 
100 void iCubArmJointsSensor::save( ConfigurationParameters& params, QString prefix ) {
101  iCubSensor::save( params, prefix );
102  params.startObjectParameters( prefix, "iCubArmJointsSensor", this );
103  params.createParameter( prefix, "arm", icubArm );
104 }
105 
106 void iCubArmJointsSensor::describe( QString type ) {
107  iCubSensor::describe( type );
108  Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub arm" );
109  d.describeEnum( "arm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The arm from which the joint angles are read" );
110 }
111 
113  // Checking all resources we need exist
115 
116  // Acquiring the lock to get resources
117  ResourcesLocker locker( this );
118 
119  QStringList values;
120  for( int i=0; i<7; i++ ) {
121  double value;
122  icubMotors->getEncoder( i, &value );
123  values << QString::number( value );
124  }
125  //exp->setStatus( QString("SENSOR Reading: <")+values.join(", ")+QString(">") );
126  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
127  evonetIt->setCurrentBlock( name() );
128  for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
129  double min, max, value;
130  icubMotors->getEncoder(i, &value);
131  icubMotors->getLimits(i,&min,&max);
132  //normalizziamo i valori dei motori tra 0 ed 1;
133  evonetIt->setInput( linearMap(value,min,max,0,1) );
134  }
135 }
136 
138  return 7;
139 }
140 
141 void iCubArmJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
142  iCubSensor::resourceChanged(resourceName, changeType);
143 
144  if (changeType == Deleted) {
145  return;
146  }
147 
148  if (resourceName == icubResource) {
149  iCubRobot* icub = getResource<iCubRobot>();
150  if ( icubArm == "right" ) {
151  icubMotors = icub->rightArmController();
152  } else {
153  icubMotors = icub->leftArmController();
154  }
155  } else if (resourceName == neuronsIteratorResource) {
156  QString lbl;
157  if ( icubArm == "right" ) {
158  lbl = "R";
159  } else {
160  lbl = "L";
161  }
162 
163  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
164  evonetIt->setCurrentBlock( name() );
165  for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
166  evonetIt->setGraphicProperties( lbl + QString("a") + QString::number(i), 0.0, 1.0, Qt::red );
167  }
168  } else {
169  Logger::info("Unknown resource " + resourceName + " for " + name());
170  }
171 }
172 
174  iCubSensor(params, prefix),
175  nObjects(3)
176 {
177  nObjects = ConfigurationHelper::getInt(params, prefix + "nObjects", nObjects);
178  // Declaring the resources that are needed here
179  usableResources( QStringList() << icubResource << neuronsIteratorResource << "objects" );
180 }
181 
182 void ColorCameraSensor::save( ConfigurationParameters& params, QString prefix ) {
183  iCubSensor::save( params, prefix );
184  params.startObjectParameters( prefix, "ColorCameraSensor", this );
185  params.createParameter( prefix, "nObjects", QString::number(nObjects) );
186 }
187 
188 void ColorCameraSensor::describe( QString type ) {
189  iCubSensor::describe( type );
190  Descriptor d = addTypeDescription( type, "Color Camera Sensor" );
191  d.describeInt( "nObjects" ).def( 3 ).help( "Number of Objects" );
192 }
193 
194 // update the camera on the basis of 3 objects that should be defined as red, green, and blue
196 {
197  // Checking all resources we need exist
199 
200  // Acquiring the lock to get resources
201  ResourcesLocker locker( this );
202 
203  QVector<WObject*>& objects = *(getResource<QVector<WObject*> >( "objects" ));
204  iCubRobot* icub = getResource<iCubRobot>(icubResource);
205  // Setting the eye matrix in the projector
206  m_projector.setEyeMatrix(icub->headNeck()[4]->matrix());
207 
208  NeuronsIterator* evonetIt = getResource<NeuronsIterator>(neuronsIteratorResource);
209  evonetIt->setCurrentBlock( name() );
210  // Activating the three parts of the map: Red...
211  for(int i = 0; i < nObjects; i++) {
212  // Checking we don't try to access unexisting objects
213  if (i >= objects.size()) {
214  // Filling with 0.5
215  // up-down
216  evonetIt->setInput( 0.5 );
217  evonetIt->nextNeuron();
218  // right-left
219  evonetIt->setInput( 0.5 );
220  evonetIt->nextNeuron();
221  continue;
222  }
223 
224  // Computing the projection of the object on the retina
225  m_projector.set3DPointWorld(objects[i]->matrix().w_pos);
226 
227  // If the object is within the retina, activating the map
228  if (m_projector.pointInsideImage())
229  {
230  // mapPoint01.x.y = distance bteween the barycentre of the object and the border of the
231  // field of view normalized in the range [0.0,1.0]
232  const ImagePoint mapPoint01 = m_projector.getImagePoint01();
233 
234  // up-down
235  //mapPoint01.y = distance normalized tra 0-1 baricentro oggetto e bord fieldofview
236  evonetIt->setInput( 0.5 - mapPoint01.y );
237  evonetIt->nextNeuron();
238  // right-left
239  evonetIt->setInput( 0.5 - mapPoint01.x );
240  evonetIt->nextNeuron();
241 
242  // Storing the position on the retina of the current object
243  m_objectsRetinaPosition.insert(objects[i],mapPoint01);
244  } else {
245  evonetIt->setInput( 0.0 );
246  evonetIt->nextNeuron();
247  evonetIt->setInput( 0.0 );
248  evonetIt->nextNeuron();
249  // Storing an invalid point for the current object
250  m_objectsRetinaPosition.insert(objects[i], ImagePoint());
251  }
252  }
253 }
254 
256 {
257  return nObjects*2;
258 }
259 
260 void ColorCameraSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
261 {
262  iCubSensor::resourceChanged(resourceName, changeType);
263 
264  if (changeType == Deleted) {
265  return;
266  }
267 
268  if (resourceName == icubResource) {
269  // Nothing to do here, we get the robot using getResource() in update()
270  } else if (resourceName == neuronsIteratorResource) {
271  QString lbl[2];
272  lbl[0] = "H"; //horizontal
273  lbl[1] = "V"; //vertical
274  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
275  evonetIt->setCurrentBlock( name() );
276  for (int obj = 0; obj < nObjects; obj++) {
277  for (int i=0; i < 2; i++, evonetIt->nextNeuron()) {
278  QString label;
279  switch (obj) {
280  case 0:
281  label = QString("Cr")+ lbl[i];//QString::number(i);
282  break;
283  case 1:
284  label = QString("Cg")+lbl[i];//QString::number(i);
285  break;
286  case 2:
287  label = QString("Cb")+lbl[i];//QString::number(i);
288  break;
289  default:
290  label = QString::number(obj)+QString::number(i);
291  break;
292  }
293  evonetIt->setGraphicProperties( label, -1.0, 1.0, Qt::red );
294  }
295  }
296  } else if (resourceName == "objects") {
297  // Nothing to do here, we get objects using getResource() in update()
298  } else {
299  Logger::info("Unknown resource " + resourceName + " for " + name());
300  }
301 }
302 
303 //iCubPalmTargetDistSensor : begin implementation
304 // it returns the distance between right or left palm and a defined target
306  iCubSensor(params, prefix) {
307  icubPalm = ConfigurationHelper::getString( params, prefix+"palm", "right" );
308  targetName= ConfigurationHelper::getString( params, prefix+"target", "target" );
309  QVector<double> vec1 = ConfigurationHelper::getVector( params, prefix+"bbMin" );
310  QVector<double> vec2 = ConfigurationHelper::getVector( params, prefix+"bbMax" );
311  if ( vec1.size() == 3 && vec2.size() == 3 ) {
312  linearize = true;
313  bbMin = wVector( vec1[0], vec1[1], vec1[2] );
314  bbMax = wVector( vec2[0], vec2[1], vec2[2] );
315  } else {
316  linearize = false;
317  if ( ! (vec1.isEmpty() && vec2.isEmpty()) ) {
318  Logger::warning( QString("iCubPalmTargetDistSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
319  }
320  }
321 
322  QVector<double> pal1 = ConfigurationHelper::getVector( params, prefix+"palmOffset" );
323  if ( pal1.size() == 3 ) {
324  addPalmOffset = true;
325  palmOffset = wVector( pal1[0], pal1[1], pal1[2] );
326  } else {
327  addPalmOffset = false;
328  if ( !pal1.isEmpty() ) {
329  Logger::warning( QString("iCubPalmTargetDistSensor %1 - palmOffset parameter is not well specified; It will be ignored").arg(name()) );
330  }
331  }
332 
333  // Declaring the resources that are needed here
335 }
336 
338  /* nothing to do */
339 }
340 
342 {
343  iCubSensor::save( params, prefix );
344  params.startObjectParameters( prefix, "iCubPalmTargetDistSensor", this );
345  params.createParameter( prefix, "palm", icubPalm);
346  params.createParameter( prefix, "target", targetName);
347  if ( linearize ) {
348  params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
349  params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
350  }
351 }
352 
354  iCubSensor::describe( type );
355  Descriptor d = addTypeDescription( type, "Sensor for reading the distance between right or left palm and a specified target" );
356  d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the distance to the target is computed" );
357  d.describeString( "target" ).def( "target" ).help( "The name of the resource associated with the target object" );
358  d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used for linearize the object position into [-1,1]" );
359  d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used for linearize the object position into [-1,1]" );
360  d.describeReal( "palmOffset" ).props( IsList ).help( "The offset respect to the palm on which the distance will be computed" );
361 }
362 
364  // Checking all resources we need exist
366 
367  // Acquiring the lock to get resources
368  ResourcesLocker locker( this );
369 
370  iCubRobot* icub = getResource<iCubRobot>( icubResource );
371  WObject* target = getResource<WObject>( targetName );
372  wVector targetPosInICub = icub->matrix().untransformVector( target->matrix().w_pos );
373  wVector palmPosInICub;
374  if ( isLeft ) {
375  wMatrix t2 = icub->leftArm()[6]->matrix();
376  if ( addPalmOffset ) {
377  t2.w_pos += t2.rotateVector( palmOffset );
378  }
379  palmPosInICub = icub->matrix().untransformVector( t2.w_pos );
380  } else {
381  wMatrix t2 = icub->rightArm()[6]->matrix();
382  if ( addPalmOffset ) {
383  t2.w_pos += t2.rotateVector( palmOffset );
384  }
385  palmPosInICub = icub->matrix().untransformVector( t2.w_pos );
386  }
387 
388  wVector distanceVec = palmPosInICub - targetPosInICub;
389  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
390  evonetIt->setCurrentBlock( name() );
391  for( int i=0; i<3; i++ ) {
392  if ( linearize ) {
393  // linearize into [-1,1]
394  evonetIt->setInput( linearMap( distanceVec[i], bbMin[i], bbMax[i], -1, 1 ) );
395  } else {
396  evonetIt->setInput( distanceVec[i] );
397  }
398  evonetIt->nextNeuron();
399  }
400 }
401 
403  return 3;
404 }
405 
406 void iCubPalmTargetDistSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
407  iCubSensor::resourceChanged(resourceName, changeType);
408 
409  if (changeType == Deleted) {
410  return;
411  }
412 
413  if (resourceName == icubResource) {
414  // Nothing to do here, we get the robot using getResource() in update()
415  } else if (resourceName == neuronsIteratorResource) {
416  QString lbl;
417  if ( icubPalm == "right" ) {
418  lbl="R";
419  isLeft = false;
420  } else {
421  lbl="L";
422  isLeft = true;
423  }
424 
425  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
426  evonetIt->setCurrentBlock( name() );
427  for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
428  evonetIt->setGraphicProperties( lbl+QString("d")+QString::number(i), -1.0, 1.0, Qt::red );
429  }
430  } else if (resourceName == targetName) {
431  // Nothing to do here, we get the taget using getResource() in update()
432  } else {
433  Logger::info("Unknown resource " + resourceName + " for " + name());
434  }
435 }
436 
437 //iCubPalmTargetDistSensor : end implementation
438 
// iCubPalmTouchSensor begin implementation
441  iCubSensor(params, prefix) {
442  wPalm=NULL;
443  objects=NULL;
444  icubPalm = ConfigurationHelper::getString( params, prefix+"palm", "right" );
445  // Declaring the resources that are needed here
446  usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
447 }
448 
449 void iCubPalmTouchSensor::describe( QString type ) {
450  iCubSensor::describe( type );
451  Descriptor d = addTypeDescription( type, "Sensor for reading right or left palm touch sensor" );
452  d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the touch sensor is read" );
453 
454 }
456  // Checking all resources we need exist
458 
459  // Acquiring the lock to get resources
460  ResourcesLocker locker( this );
461 
462  double touch=0.0;
463  //we should put following instruction in the resourceChanged method
464  World *world = getResource<World>("world");
465  if(objects!=NULL) {
466  for(int i=0;i<objects->size();i++)
467  {
468  if(world->checkContacts((PhyObject*)wPalm,(PhyObject*)objects->at(i)))
469  touch=1.0;
470  }
471  }
472  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
473  evonetIt->setCurrentBlock( name() );
474 
475  for( int i=0; i<size(); i++, evonetIt->nextNeuron() ) {
476  evonetIt->setInput(touch);
477  }
478 }
479 
481  return 1;
482 }
483 
485 {}
486 
488 {
489  iCubSensor::save( params, prefix );
490  params.startObjectParameters( prefix, "iCubPalmTouchSensor", this );
491  params.createParameter( prefix, "palm", icubPalm);
492 
493 }
494 
495 void iCubPalmTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
496  iCubSensor::resourceChanged(resourceName, changeType);
497 
498  if (changeType == Deleted) {
499  return;
500  }
501 
502  if (resourceName == icubResource) {
503  iCubRobot *icub = getResource<iCubRobot>();
504 
505  if ( icubPalm == "right" ) {
506  wPalm = icub->rightArm()[6];
507  } else {
508  wPalm = icub->leftArm()[6];
509  }
510  } else if (resourceName == "world") {
511  // Nothing to do here
512  } else if (resourceName == "objects") {
513  objects = getResource<QVector<WObject*> >();
514  } else if (resourceName == neuronsIteratorResource) {
515  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
516  evonetIt->setCurrentBlock( name() );
517  for( int i=0; i<1; i++, evonetIt->nextNeuron() ) {
518  if(icubPalm=="right") {
519  evonetIt->setGraphicProperties( QString("Rpt"), 0, 1, Qt::red );
520  } else {
521  evonetIt->setGraphicProperties( QString("Lpt"), 0, 1, Qt::red );
522  }
523  }
524  } else {
525  Logger::info("Unknown resource " + resourceName + " for " + name());
526  }
527 }
528 
// iCubPalmTouchSensor end implementation
530 
532  iCubSensor(params, prefix),
533  m_icubHand("right"),
534  m_checkAllObjects(true),
535  m_world(NULL),
536  m_icubArm(),
537  m_objects(),
538  m_icub(NULL)
539 {
540  m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", "right");
541  m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", true);
542  // Declaring the resources that are needed here
543  usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
544 
545 }
546 
548 {
549 }
550 
552 {
553  iCubSensor::save( params, prefix );
554  params.startObjectParameters(prefix, "iCubHandTouchSensor", this);
555  params.createParameter(prefix, "hand", m_icubHand);
556  params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
557 }
558 
560 {
561  iCubSensor::describe( type );
562  Descriptor d = addTypeDescription(type, "Hand touch sensor", "The touch sensor of the iCub hand. There are six sensors: one on the palm and one for each figertip");
563  d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
564  d.describeBool("checkAllObjects").def(true).help("Wheter to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
565 }
566 
568 {
569  // Checking all resources we need exist
571 
572  // Acquiring the lock to get resources
573  ResourcesLocker locker( this );
574 
575  m_objects = *(getResource<QVector<WObject*> >( "objects" ));
576 
577  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
578  evonetIt->setCurrentBlock(name());
579 
580  evonetIt->setInput(handPieceCollides(m_icubArm[6])); // Palm
581  evonetIt->nextNeuron();
582  evonetIt->setInput(handPieceCollides(m_icubArm[19])); // Index
583  evonetIt->nextNeuron();
584  evonetIt->setInput(handPieceCollides(m_icubArm[20])); // Middle
585  evonetIt->nextNeuron();
586  evonetIt->setInput(handPieceCollides(m_icubArm[21])); // Ring
587  evonetIt->nextNeuron();
588  evonetIt->setInput(handPieceCollides(m_icubArm[22])); // Little
589  evonetIt->nextNeuron();
590  evonetIt->setInput(handPieceCollides(m_icubArm[26])); // Thumb
591  evonetIt->nextNeuron();
592 }
593 
595 {
596  return 6;
597 }
598 
599 void iCubHandTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
600 {
601  iCubSensor::resourceChanged(resourceName, changeType);
602 
603  if (changeType == Deleted) {
604  return;
605  }
606 
607  if (resourceName == icubResource) {
608  m_icub = getResource<iCubRobot>();
609  if (m_icubHand == "left") {
610  m_icubArm = m_icub->leftArm();
611  } else {
612  m_icubArm = m_icub->rightArm();
613  }
614  } else if (resourceName == "world") {
615  m_world = getResource<World>();
616  } else if (resourceName == "objects") {
617  // Nothing to do here, we get objects using getResource() in update()
618  } else if (resourceName == neuronsIteratorResource) {
619  QString lbl;
620  if (m_icubHand == "left") {
621  lbl = "L";
622  } else {
623  lbl = "R";
624  }
625 
626  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
627  evonetIt->setCurrentBlock(name());
628  evonetIt->setGraphicProperties(lbl + "pt", 0.0, 1.0, Qt::red);
629  evonetIt->nextNeuron();
630  evonetIt->setGraphicProperties(lbl + "f1", 0.0, 1.0, Qt::red);
631  evonetIt->nextNeuron();
632  evonetIt->setGraphicProperties(lbl + "f2", 0.0, 1.0, Qt::red);
633  evonetIt->nextNeuron();
634  evonetIt->setGraphicProperties(lbl + "f3", 0.0, 1.0, Qt::red);
635  evonetIt->nextNeuron();
636  evonetIt->setGraphicProperties(lbl + "f4", 0.0, 1.0, Qt::red);
637  evonetIt->nextNeuron();
638  evonetIt->setGraphicProperties(lbl + "f5", 0.0, 1.0, Qt::red);
639  } else {
640  Logger::info("Unknown resource " + resourceName + " for " + name());
641  }
642 }
643 
644 double iCubHandTouchSensor::handPieceCollides(PhyObject* handPiece)
645 {
646  if (m_icub->isKinematic()) {
647  for (int i = 0; i < m_objects.size(); i++) {
648  PhyObject* obj = dynamic_cast<PhyObject*>(m_objects[i]);
649  if ((obj != NULL) && (m_world->checkContacts(handPiece, (PhyObject*) m_objects[i]))) {
650  return 1.0;
651  }
652  }
653 
654  return 0.0;
655  } else {
656  // Taking the vector of contacts. If no contact is present, this returns an empty vector
657  const contactVec& c = m_world->contacts()[handPiece];
658 
659  if (c.size() == 0) {
660  return 0.0;
661  } else if (m_checkAllObjects) {
662  return 1.0;
663  } else {
664  for (int i = 0; i < m_objects.size(); i++) {
665  for (int j = 0; j < c.size(); j++) {
666  if (c[j].collide == m_objects[i]) {
667  return 1.0;
668  }
669  }
670  }
671 
672  return 0.0;
673  }
674  }
675 
676  return 0.0;
677 }
678 
679 // iCubTorsoJointsSensor begin implementation
681  iCubSensor(params, prefix) {
682  // Declaring the resources that are needed here
684 }
685 
686 void iCubTorsoJointsSensor::describe( QString type ) {
687  iCubSensor::describe( type );
688  Descriptor d = addTypeDescription( type, "Sensor for reading right or left palm touch sensor" );
689 
690 }
691 
693  // Checking all resources we need exist
695 
696  // Acquiring the lock to get resources
697  ResourcesLocker locker( this );
698 
699  double minRot, maxRot;
700  double minFlex, maxFlex;
701  double curRot;
702  double curFlex;
703 
704  icubMotors->getLimits(0, &minRot, &maxRot);
705  icubMotors->getLimits(2, &minFlex, &maxFlex);
706  icubMotors->getEncoder(0, &curRot);
707  icubMotors->getEncoder(2, &curFlex);
708 
709  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
710  evonetIt->setCurrentBlock( name() );
711  evonetIt->setInput(((curRot - minRot) / (maxRot - minRot)) * 2.0 - 1.0);
712  evonetIt->nextNeuron();
713  evonetIt->setInput((curFlex - minFlex) / (maxFlex - minFlex));
714 
715 }
716 
718  return 2;
719 }
720 
721 void iCubTorsoJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
722  iCubSensor::resourceChanged(resourceName, changeType);
723 
724  if (changeType == Deleted) {
725  return;
726  }
727 
728  if (resourceName == icubResource) {
729  iCubRobot* icub = getResource<iCubRobot>();
730  icubMotors = icub->torsoController();
731  } else if (resourceName == neuronsIteratorResource) {
732  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
733  evonetIt->setCurrentBlock( name() );
734 
735  evonetIt->setGraphicProperties( QString("t0"), 0, 1, Qt::red ); //rotation
736  evonetIt->nextNeuron();
737  evonetIt->setGraphicProperties( QString("t1"), 0, 1, Qt::red ); //flexion
738  evonetIt->nextNeuron();
739  } else {
740  Logger::info("Unknown resource " + resourceName + " for " + name());
741  }
742 }
743 
745 {}
746 
748 {
749  iCubSensor::save( params, prefix );
750  params.startObjectParameters( prefix, "iCubTorsoJointsSensor", this );
751 
752 
753 }
754 // iCubTorsoJointsSensor end implementation
755 
756 // iCubHeadJointsSensor begin implementation
758  iCubSensor(params, prefix) {
759  // Declaring the resources that are needed here
761 }
762 
764 {}
765 
767 {
768  iCubSensor::save( params, prefix );
769  params.startObjectParameters( prefix, "iCubHeadJointsSensor", this );
770 }
771 
772 void iCubHeadJointsSensor::describe( QString type ) {
773  iCubSensor::describe( type );
774  Descriptor d = addTypeDescription( type, "Sensor for reading head sensors" );
775 
776 }
777 
779  // Checking all resources we need exist
781 
782  // Acquiring the lock to get resources
783  ResourcesLocker locker( this );
784 
785  double minRot, maxRot;
786  double minFlex, maxFlex;
787  double curRot;
788  double curFlex;
789 
790  icubMotors->getLimits(0, &minRot, &maxRot);
791  icubMotors->getLimits(2, &minFlex, &maxFlex);
792  icubMotors->getEncoder(0, &curRot);
793  icubMotors->getEncoder(2, &curFlex);
794 
795  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
796  evonetIt->setCurrentBlock( name() );
797  evonetIt->setInput(((curRot - minRot) / (maxRot - minRot)) );
798  evonetIt->nextNeuron();
799  evonetIt->setInput((curFlex - minFlex) / (maxFlex - minFlex));
800 
801 }
802 
804  return 2;
805 }
806 
807 void iCubHeadJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
808  iCubSensor::resourceChanged(resourceName, changeType);
809 
810  if (changeType == Deleted) {
811  return;
812  }
813 
814  if (resourceName == icubResource) {
815  iCubRobot* icub = getResource<iCubRobot>();
816  icubMotors = icub->headNeckController();
817  } else if (resourceName == neuronsIteratorResource) {
818  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
819  evonetIt->setCurrentBlock( name() );
820 
821  evonetIt->setGraphicProperties( QString("n0"), 0, 1, Qt::red ); //n stands for neck
822  evonetIt->nextNeuron();
823  evonetIt->setGraphicProperties( QString("n1"), 0, 1, Qt::red );
824  evonetIt->nextNeuron();
825  } else {
826  Logger::info("Unknown resource " + resourceName + " for " + name());
827  }
828 }
829 
830 // iCubHeadJointsSensor end implementation
831 
// iCubHandJointsSensor begin implementation
834  iCubSensor(params, prefix),
835  icubMotors(NULL) {
836  icubHand = ConfigurationHelper::getString( params, prefix+"hand", "right" );
837  // Declaring the resources that are needed here
839 }
840 
842  /* nothing to do */
843 }
844 
845 void iCubHandJointsSensor::save( ConfigurationParameters& params, QString prefix ) {
846  iCubSensor::save( params, prefix );
847  params.startObjectParameters( prefix, "iCubHandJointsSensor", this );
848  params.createParameter( prefix, "hand", icubHand );
849 }
850 
851 void iCubHandJointsSensor::describe( QString type ) {
852  iCubSensor::describe( type );
853  Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub hand" );
854  d.describeEnum( "hand" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The hand from which the joint angles are read" );
855 }
856 
858  // Checking all resources we need exist
860 
861  // Acquiring the lock to get resources
862  ResourcesLocker locker( this );
863 
864 // QStringList values;
865 // for( int i=9; i<16; i++ ) {
866 // double value;
867 // icubMotors->getEncoder( i, &value );
868 // values << QString::number( value );
869 // }
870 // exp->setStatus( QString("SENSOR Reading: <")+values.join(", ")+QString(">") );
871  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
872  evonetIt->setCurrentBlock( name() );
873  for( int i=9; i<16; i++, evonetIt->nextNeuron() ) {
874  double min, max, value;
875  icubMotors->getEncoder(i, &value);
876  icubMotors->getLimits(i,&min,&max);
877  //normalizziamo i valori dei motori tra 0 ed 1;
878  evonetIt->setInput( linearMap(value,min,max,0,1) );
879  }
880 }
881 
883  return 7;
884 }
885 
886 void iCubHandJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
887  iCubSensor::resourceChanged(resourceName, changeType);
888 
889  if (changeType == Deleted) {
890  return;
891  }
892 
893  if (resourceName == icubResource) {
894  iCubRobot* icub = getResource<iCubRobot>();
895  if ( icubHand == "right" ) {
896  icubMotors = icub->rightArmController();
897  } else {
898  icubMotors = icub->leftArmController();
899  }
900  } else if (resourceName == neuronsIteratorResource) {
901  QString label;
902  if ( icubHand == "right" ) {
903  label="R";
904  } else {
905  label="L";
906  }
907 
908  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
909  evonetIt->setCurrentBlock( name() );
910  for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
911  evonetIt->setGraphicProperties( label+QString("f")+QString::number(i), 0.0, 1.0, Qt::red ); //f stands for fingers
912  }
913  } else {
914  Logger::info("Unknown resource " + resourceName + " for " + name());
915  }
916 }
917 // end iCubHandJointsSensor
918 
919 namespace __PalmAndFingertipTouchSensor_internal {
920  #ifndef GLMultMatrix
921  #define GLMultMatrix glMultMatrixf
922  // for double glMultMatrixd
923  #endif
924 
928  const float epsilon = 0.0001f;
929 
936  const float numDivisionsFor90Degrees = 5.0f;
937 
941  const int maxNumContacts = 20;
942 
	// Graphical representation of a fingertip touch sensor area, drawn on a
	// PhyCylinder finger piece.
	// NOTE(review): the class declaration line and several member/initializer
	// lines are missing from this listing; the code is kept exactly as found,
	// with the apparent gaps flagged below.
	{
	public:
		// Constructor. handPiece must actually be a PhyCylinder. alpha is the
		// angular aperture of the sensor surface and h its extension along the
		// finger axis; isRight/isThumb select the angular offset of the drawn
		// area (see computeAlphaOffset)
		FingertipTouchSensorGraphic(PhyObject *handPiece, double alpha, double h, bool isRight, bool isThumb, QString name = "unamed") :
			GraphicalWObject(handPiece->world(), name),
			m_handPiece(dynamic_cast<PhyCylinder*>(handPiece)),
			m_handPieceHeight(m_handPiece->height()),
			m_handPieceRadius(m_handPiece->radius()),
			m_alpha(alpha),
			m_h(h),
			m_alphaOffset(computeAlphaOffset(isRight, isThumb)),
			// Step chosen so that every 90° of aperture is split into numDivisionsFor90Degrees segments
			m_angularIncrement(m_alpha / (ceil(m_alpha / (M_PI / 2.0)) * numDivisionsFor90Degrees)),
			m_isActive(false),
			// NOTE(review): initializers for m_startingAngle/m_endingAngle appear to be
			// missing from this listing (the trailing comma above is left as found)
		{
			// Attaching to handPiece (which also becomes our owner)
			// NOTE(review): the attachToObject(...) call appears to be missing here

			// We also use our own color and texture
			setTexture("");
			setColor(Qt::cyan);
		}

		// Destructor (signature missing from this listing); nothing to do
		{
		}

		// Marks the sensor as active/inactive. Thread-safe: the flag is read by
		// render(), which runs in the GUI thread
		void setActive(bool isActive)
		{
			m_isActiveMutex.lock();
			m_isActive = isActive;
			m_isActiveMutex.unlock();
		}

	protected:
		// Draws the sensor surface on the fingertip: a fan of triangles closing the
		// tip plus a strip of quads along the side of the finger cylinder. The area
		// is drawn red when the sensor is active, cyan otherwise
		virtual void render(RenderWObject* renderer, QGLContext* gw)
		{
			// First of all changing our color depending on the value of m_isActive
			m_isActiveMutex.lock();
			if (m_isActive) {
				setColor(Qt::red);
			} else {
				setColor(Qt::cyan);
			}
			m_isActiveMutex.unlock();

			// Bringing the coordinate system on the fingerip
			wMatrix mtr = tm;
			mtr.w_pos += mtr.x_ax.scale(m_handPieceHeight / 2.0);

			glPushMatrix();
			renderer->container()->setupColorTexture(gw, renderer);
			GLMultMatrix(&mtr[0][0]);

			// Drawing the top part of the sensor
			glBegin(GL_TRIANGLES);

			// All normals here are along the x axis. All triangles have a vertex on
			// the axis of the cylinder
			const float adjustedRadius = m_handPieceRadius + epsilon;
			for (float angle = m_startingAngle; angle < m_endingAngle; angle += m_angularIncrement) {
				// Computing the next angle (we have to do this to avoid numerical errors)
				const float nextAngle = angle + m_angularIncrement;
				const float effectiveNextAngle = ((nextAngle > m_endingAngle) ? m_endingAngle : nextAngle);
				glNormal3f(1.0, 0.0, 0.0);
				glVertex3f(epsilon, 0.0, 0.0);
				glVertex3f(epsilon, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
				glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
			}
			glEnd();

			// Now drawing the remaining part
			glBegin(GL_QUADS);

			// Here we have to compute the right normal for each face
			for (float angle = m_startingAngle; angle < m_endingAngle; angle += m_angularIncrement) {
				// Computing the next angle (we have to do this to avoid numerical errors)
				const float nextAngle = angle + m_angularIncrement;
				const float effectiveNextAngle = ((nextAngle > m_endingAngle) ? m_endingAngle : nextAngle);
				// To compute the normal we take two vectors along two adiacent sides of the quad, compute the cross
				// product and then normalize it (the product order is important, of course)
				const wVector v1(0.0, sin(angle) - sin(angle + m_angularIncrement), cos(angle) - cos(angle + m_angularIncrement));
				const wVector v2(1.0, 0.0, 0.0);
				const wVector normal = (v1 * v2).normalize();
				glNormal3f(normal.x, normal.y, normal.z);

				glVertex3f(epsilon, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
				glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
				glVertex3f(-m_h, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
				glVertex3f(-m_h, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
			}
			glEnd();
			glPopMatrix();
		}

		// Returns the angular offset for the drawn area depending on the hand
		// and on whether the piece is the thumb
		static float computeAlphaOffset(bool isRight, bool isThumb) {
			// The angle we use when drawing is respect to the z axis on the yz plane. Here we compute
			// the offsets needed in different cases (see the comment in the function
			// iCubFingertipsTouchSensor::goodCollisionPoint for more information about the frame of
			// references of the various fingers in the two hands)
			float offset = 0.0;
			if (isRight) {
				offset = isThumb ? M_PI / 2.0 : 0.0;
			} else {
				offset = isThumb ? M_PI / 2.0 : M_PI;
			}

			return offset;
		}

		// NOTE(review): the declaration of the hand-piece pointer (m_handPiece)
		// appears to be missing from this listing

		//! Height of the finger cylinder, cached at construction
		const real m_handPieceHeight;

		//! Radius of the finger cylinder, cached at construction
		const real m_handPieceRadius;

		//! Angular aperture of the sensor surface
		const float m_alpha;

		//! Extension of the sensor surface along the finger axis
		const float m_h;

		//! Angular offset depending on hand/finger (see computeAlphaOffset)
		const float m_alphaOffset;

		//! Angular step used when drawing the curved surface
		const float m_angularIncrement;

		//! First angle used when drawing

		const float m_startingAngle;

		//! Last angle used when drawing
		const float m_endingAngle;

		// NOTE(review): the declarations of m_isActive and m_isActiveMutex appear
		// to be missing from this listing (both are used above)
	};
1173 
	// Graphical representation of the four palm touch patches, drawn on the
	// PhyBox hand palm.
	// NOTE(review): the class declaration line and some member/initializer lines
	// are missing from this listing; the code is kept exactly as found, with the
	// apparent gaps flagged below.
	{
	public:
		// Constructor. handPalm must actually be a PhyBox; patches is the list of
		// triangles to draw; isRight selects on which side of the palm (+z or -z)
		// the patches are drawn
		PalmPatchesTouchSensorGraphic(PhyObject *handPalm, const QVector<iCubPalmPatchesTouchSensor::Triangle>& patches, bool isRight, QString name = "unamed") :
			GraphicalWObject(handPalm->world(), name),
			m_handPalm(dynamic_cast<PhyBox*>(handPalm)),
			m_patches(patches),
			m_isRight(isRight),
			m_zAxisDirection(isRight ? 1.0 : -1.0),
			m_activations(m_patches.size(), false),
			// NOTE(review): a trailing initializer (line 1209 of the original file)
			// appears to be missing here; the trailing comma above is left as found
		{
			// Attaching to handPalm (which also becomes our owner)
			attachToObject(m_handPalm, true);

			// We also use our own color and texture
			setTexture("");
			setColor(Qt::cyan);
		}

		// Destructor (signature missing from this listing); nothing to do
		{
		}

		// Stores the per-patch activation flags. Thread-safe: the vector is read
		// by render(), which runs in the GUI thread
		void setActivations(const QVector<bool> activations)
		{
			m_activationsMutex.lock();
			m_activations = activations;
			m_activationsMutex.unlock();
		}

	protected:
		// Draws the triangular patches on the palm (red when the corresponding
		// sensor is active, cyan otherwise) plus black lines separating them
		virtual void render(RenderWObject* renderer, QGLContext* gw)
		{
			// Copying the m_activations vector to a local vector to avoid concurrent accesses
			m_activationsMutex.lock();
			const QVector<bool> activations(m_activations);
			m_activationsMutex.unlock();

			// We receive the list of triangles from the sensor, we just need to display them
			glPushMatrix();
			renderer->container()->setupColorTexture(gw, renderer);
			GLMultMatrix(&tm[0][0]);

			// First drawing the triangles making up the sensor
			glBegin(GL_TRIANGLES);
			glNormal3f(0.0, 0.0, m_zAxisDirection);
			for (int i = 0; i < m_patches.size(); i++) {
				// NOTE(review): the declaration of t (presumably a reference to
				// m_patches[i]) appears to be missing from this listing

				QColor col;
				if (activations[i]) {
					col = Qt::red;
				} else {
					col = Qt::cyan;
				}
				glColor4f(col.redF(), col.greenF(), col.blueF(), col.alphaF());

				glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
				glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
				glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
			}
			glEnd();

			// Now drawing the lines separating the triangles. Using the for we draw some line twice,
			// it's not a big problem
			glBegin(GL_LINES);
			glNormal3f(0.0, 0.0, m_zAxisDirection);
			glColor4f(0.0, 0.0, 0.0, 1.0);
			for (int i = 0; i < m_patches.size(); i++) {
				// NOTE(review): the declaration of t appears to be missing here as well

				glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
				glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);

				glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
				glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);

				glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
				glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
			}
			glEnd();
			glPopMatrix();
		}

		// NOTE(review): the declaration of m_handPalm appears to be missing from this listing

		//! The triangles making up the patches, set at construction
		const QVector<iCubPalmPatchesTouchSensor::Triangle> m_patches;

		//! True if this is the right hand
		const bool m_isRight;

		//! +1 for the right hand (palm towards +z), -1 for the left hand
		const float m_zAxisDirection;

		//! Per-patch activation flags, written by setActivations()
		QVector<bool> m_activations;

		// NOTE(review): the declaration of m_activationsMutex appears to be missing here
	};
1345 }
1346 
1347 using namespace __PalmAndFingertipTouchSensor_internal;
1348 
	// NOTE(review): the constructor signature (iCubFingertipsTouchSensor's
	// constructor taking ConfigurationParameters and a prefix) is missing from
	// this listing; what follows is its member-initializer list and body.
	iCubSensor(params, prefix),
	m_icubHand("right"),
	m_isRight(true),
	m_checkAllObjects(true),
	m_alpha(M_PI / 4.0), // 45°
	m_h(0.01),
	m_drawSensor(true),
	m_world(NULL),
	m_icubArm(),
	m_objects(NULL),
	m_icub(NULL),
	m_graphicalTouchSensors()
{
	// The hand parameter is case-insensitive and must be "right" or "left"
	m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", m_icubHand);
	if (m_icubHand.toLower() == "right") {
		m_isRight = true;
	} else if (m_icubHand.toLower() == "left") {
		m_isRight = false;
	} else {
		ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
	}
	m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
	// alpha is expressed in degrees in the configuration file but stored in radians
	m_alpha = toRad(ConfigurationHelper::getDouble(params, prefix + "alpha", toDegree(m_alpha)));
	m_h = ConfigurationHelper::getDouble(params, prefix + "h", m_h);
	m_drawSensor = ConfigurationHelper::getBool(params, prefix + "drawSensor", m_drawSensor);

	// Declaring the resources that are needed here
	usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
}

// Destructor (signature missing from this listing)
{
	// Nothing to do here, renderers are destroyed by their owners
}
1384 
// iCubFingertipsTouchSensor::save (signature missing from this listing):
// writes the sensor configuration back into params
{
	iCubSensor::save(params, prefix);
	params.startObjectParameters(prefix, "iCubFingertipsTouchSensor", this);
	params.createParameter(prefix, "hand", m_icubHand);
	// Booleans and numbers are stored as strings in the configuration file;
	// alpha is saved back in degrees (it is kept in radians internally)
	params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
	params.createParameter(prefix, "alpha", QString::number(toDegree(m_alpha)));
	params.createParameter(prefix, "h", QString::number(m_h));
	params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
}
1395 
// iCubFingertipsTouchSensor::describe (signature missing from this listing):
// declares the sensor type and its configuration parameters
{
	iCubSensor::describe(type);
	Descriptor d = addTypeDescription(type, "Hand fingertips touch sensor", "The touch sensor of the iCub fingertips. There are five sensors, one for each figertip");
	d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
	d.describeBool("checkAllObjects").def(true).help("Wheter to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
	d.describeReal("alpha").def(45.0).help("The aperture of the sensor surface", "The aperture angle of the sensor surface in degrees (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
	d.describeReal("h").def(0.01).help("The height of the sensor surface", "The height of the sensor surface (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
	d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true areas corresponding to the touch sensor surface are drawn on the fingertips when doing graphical simulations");
}
1406 
// iCubFingertipsTouchSensor::update (signature missing from this listing):
// computes the binary collision state of the five fingertips and feeds it
// to the corresponding network inputs
{
	// Checking all resources we need exist
	// NOTE(review): the resource-existence check call appears to be missing here

	// Acquiring the lock to get resources
	ResourcesLocker locker(this);

	EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
	evonetIt->setCurrentBlock(name());

	// These are the indexes of fingertips in the vector of icub arm parts
	static const unsigned int indexes[] = {19, 20, 21, 22, 26};
	for (unsigned int i = 0; i < 5; i++) {
		// The thumb is the last index (26)
		const double collision = handPieceCollides(m_icubArm[indexes[i]], (i == 4) ? true : false);
		// If sensors are also drawn, we change the color of the sensor depending on whether it
		// collides with an object or not
		if (m_drawSensor) {
			m_graphicalTouchSensors[i]->setActive((collision > 0.5));
		}
		evonetIt->setInput(collision);
		evonetIt->nextNeuron();
	}
}

// iCubFingertipsTouchSensor::size (signature missing from this listing):
// one neuron per fingertip
{
	return 5;
}
1437 
/**
 * \brief Refreshes cached pointers when one of the declared resources changes
 *
 * Caches the robot arm and (optionally) creates the graphical sensor areas when
 * the robot resource changes, sets neuron labels when the neurons iterator
 * changes, and caches the objects vector and the world.
 *
 * \param resourceName the name of the resource that changed
 * \param changeType the type of change (nothing is done on Deleted)
 */
void iCubFingertipsTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		m_icub = getResource<iCubRobot>();
		if (m_isRight) {
			m_icubArm = m_icub->rightArm();
		} else {
			m_icubArm = m_icub->leftArm();
		}

		// Checking if we have to draw the sensors. This is here because it requires a pointer to icub parts
		if (m_drawSensor) {
			m_graphicalTouchSensors.clear();

			// Creating graphical objects representing the touch sensors areas. They will set the finger piece as
			// their owner so that the destruction of the objects is handled by them
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[19], m_alpha, m_h, m_isRight, false)); // Index
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[20], m_alpha, m_h, m_isRight, false)); // Middle
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[21], m_alpha, m_h, m_isRight, false)); // Ring
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[22], m_alpha, m_h, m_isRight, false)); // Little
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[26], m_alpha, m_h, m_isRight, true)); // Thumb
		}
	} else if (resourceName == neuronsIteratorResource) {
		// Label prefix depending on the hand
		const QString lbl = m_isRight ? "R" : "L";

		EvonetIterator* evonetIt = getResource<EvonetIterator>();
		evonetIt->setCurrentBlock(name());
		// One neuron per fingertip, labelled f1..f5 (f stands for finger). Same
		// loop style as iCubHandJointsSensor::resourceChanged
		for (int i = 1; i <= 5; i++, evonetIt->nextNeuron()) {
			evonetIt->setGraphicProperties(lbl + "f" + QString::number(i), 0.0, 1.0, Qt::red);
		}
	} else if (resourceName == "objects") {
		m_objects = getResource<QVector<WObject*> >();
	} else if (resourceName == "world") {
		m_world = getResource<World>();
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
1494 
/**
 * \brief Returns 1.0 if the given hand piece touches an object on its sensor
 *        surface, 0.0 otherwise
 *
 * When the iCub is kinematic (the physics engine produces no contacts) or when
 * checkAllObjects is false, contacts are explicitly checked against each object
 * in the objects vector; otherwise the contacts computed by the physics engine
 * for the hand piece are used. Contact points are filtered through
 * goodCollisionPoint() so that only points on the sensor surface count.
 *
 * \param handPiece the finger piece to check (must be a PhyCylinder)
 * \param isThumb true if handPiece is the thumb (different frame of reference)
 * \return 1.0 on a valid contact, 0.0 otherwise
 */
double iCubFingertipsTouchSensor::handPieceCollides(PhyObject* handPiece, bool isThumb) const
{
	if (m_icub->isKinematic() || !m_checkAllObjects) {
		for (int i = 0; i < m_objects->size(); i++) {
			// Only physical objects can generate contacts
			PhyObject* const obj = dynamic_cast<PhyObject*>(m_objects->at(i));
			if (obj == NULL) {
				continue;
			}

			QVector<wVector> contacts;
			if (m_world->smartCheckContacts(handPiece, obj, maxNumContacts, &contacts)) {
				// The sensor is binary: one valid contact point is enough
				for (int j = 0; j < contacts.size(); j++) {
					if (goodCollisionPoint(handPiece, contacts[j], isThumb)) {
						return 1.0;
					}
				}
			}
		}
	} else {
		// Taking the vector of contacts. If no contact is present, this returns an empty vector
		const contactVec& c = m_world->contacts()[handPiece];

		for (int i = 0; i < c.size(); i++) {
			if (goodCollisionPoint(handPiece, c[i].pos, isThumb)) {
				return 1.0;
			}
		}
	}

	return 0.0;
}
1526 
1527 bool iCubFingertipsTouchSensor::goodCollisionPoint(PhyObject* handPiece, const wVector& collisionPoint, bool isThumb) const
1528 {
1529  // The various fingertips have frame of references with different orientations, so the direction towards
1530  // the palm (i.e. where the touch sensor area lies) is along different axes:
1531  // - right hand:
1532  // - thumb: +y axis
1533  // - all other fingers: +z axis
1534  // - left hand:
1535  // - thumb: +y axis
1536  // - all other fingers: -z axis
1537  // Here we calculate the angle on the yz plane, but the 0 angle is on different axes depending on the
1538  // hand piece, as in the list above
1539 
1540  float angle;
1541  if (isThumb) {
1542  angle = atan2(collisionPoint.z, collisionPoint.y);
1543  } else {
1544  if (m_isRight) {
1545  angle = atan2(collisionPoint.y, collisionPoint.z);
1546  } else {
1547  angle = atan2(collisionPoint.y, -collisionPoint.z);
1548  }
1549  }
1550  // Also computing the distance from the fingertip (to ease the check below)
1551  const float distFromFingertip = (dynamic_cast<PhyCylinder*>(handPiece))->height() / 2.0 - collisionPoint.x;
1552 
1553  // Checking if the collision point is good
1554  if ((angle >= -m_alpha) && (angle <= m_alpha) && (distFromFingertip <= m_h)) {
1555  return true;
1556  }
1557 
1558  return false;
1559 }
1560 
	// NOTE(review): the constructor signature (iCubPalmPatchesTouchSensor's
	// constructor taking ConfigurationParameters and a prefix) is missing from
	// this listing; what follows is its member-initializer list and body.
	iCubSensor(params, prefix),
	m_icubHand("right"),
	m_isRight(true),
	m_checkAllObjects(true),
	m_drawSensor(true),
	m_world(NULL),
	m_icubArm(),
	m_objects(NULL),
	m_icub(NULL),
	m_handPalm(NULL),
	m_patches(),
	m_graphicalTouchSensor(NULL)
{
	// The hand parameter is case-insensitive and must be "right" or "left"
	m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", m_icubHand);
	if (m_icubHand.toLower() == "right") {
		m_isRight = true;
	} else if (m_icubHand.toLower() == "left") {
		m_isRight = false;
	} else {
		ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
	}
	m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
	m_drawSensor = ConfigurationHelper::getBool(params, prefix + "drawSensor", m_drawSensor);

	// Declaring the resources that are needed here
	usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
}

// Destructor (signature missing from this listing)
{
	// Nothing to do here, the renderer is destroyed by its owners
}
1594 
// iCubPalmPatchesTouchSensor::save (signature missing from this listing):
// writes the sensor configuration back into params
{
	iCubSensor::save(params, prefix);
	params.startObjectParameters(prefix, "iCubPalmPatchesTouchSensor", this);
	params.createParameter(prefix, "hand", m_icubHand);
	// Booleans are stored as strings in the configuration file
	params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
	params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
}
1603 
// iCubPalmPatchesTouchSensor::describe (signature missing from this listing):
// declares the sensor type and its configuration parameters
{
	iCubSensor::describe(type);
	Descriptor d = addTypeDescription(type, "Hand palm touch sensor", "The touch sensor of the iCub hand palm. There are four sensors, roughly in the same positions of the four patches on the real iCub hand");
	d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
	d.describeBool("checkAllObjects").def(true).help("Wheter to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
	d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true areas corresponding to the touch sensor surface are drawn on the fingertips when doing graphical simulations");
}
1612 
// iCubPalmPatchesTouchSensor::update (signature missing from this listing):
// computes the activation of the four palm patches from the current contact
// points and feeds them to the corresponding network inputs
{
	// Checking all resources we need exist
	// NOTE(review): the resource-existence check call appears to be missing here

	// Acquiring the lock to get resources
	ResourcesLocker locker(this);

	EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
	evonetIt->setCurrentBlock(name());

	// First of all we have to get the list of collision points
	QVector<wVector> contacts;
	if (m_icub->isKinematic() || !m_checkAllObjects) {
		// Checking contacts with objects in the list. Appending all contacts to the contacts vector
		for (int i = 0; i < m_objects->size(); i++) {
			PhyObject* obj = dynamic_cast<PhyObject*>(m_objects->at(i));
			QVector<wVector> contactsWithObj;
			if (obj != NULL) {
				m_world->smartCheckContacts(m_handPalm, (PhyObject*) m_objects->at(i), maxNumContacts, &contactsWithObj);
				contacts << contactsWithObj;
			}
		}
	} else {
		// Taking the vector of contacts. If no contact is present, this returns an empty vector
		const contactVec& c = m_world->contacts()[m_handPalm];

		for (int i = 0; i < c.size(); i++) {
			contacts.append(c[i].pos);
		}
	}

	// Now we have to check each contact point for each triangle. We also save activations into a QVector
	// if the sensor is drawn
	QVector<bool> activations;
	if (m_drawSensor) {
		activations.fill(false, m_patches.size());
	}
	for (int i = 0; i < m_patches.size(); i++) {
		float activation = 0.0;
		for (int j = 0; j < contacts.size(); j++) {
			if (pointInPalmTriangle(contacts[j], m_patches[i])) {
				// The sensor is binary: one contact inside the patch is enough
				activation = 1.0;
				if (m_drawSensor) {
					activations[i] = true;
				}
				break;
			}
		}
		evonetIt->setInput(activation);
		evonetIt->nextNeuron();
	}
	if (m_drawSensor) {
		m_graphicalTouchSensor->setActivations(activations);
	}
}

// iCubPalmPatchesTouchSensor::size (signature missing from this listing)
{
	// The number of patches is always 4
	return 4;
}
1675 
/**
 * \brief Refreshes cached pointers when one of the declared resources changes
 *
 * Caches the robot arm and palm, rebuilds the four triangular patches and
 * (optionally) the graphical representation when the robot resource changes;
 * sets neuron labels when the neurons iterator changes; caches the objects
 * vector and the world.
 *
 * \param resourceName the name of the resource that changed
 * \param changeType the type of change (nothing is done on Deleted)
 */
void iCubPalmPatchesTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		m_icub = getResource<iCubRobot>();
		if (m_isRight) {
			m_icubArm = m_icub->rightArm();
		} else {
			m_icubArm = m_icub->leftArm();
		}
		m_handPalm = dynamic_cast<PhyBox*>(m_icubArm[6]);

		// Creating the list of triangles. The palmDirection constant is needed because the right and
		// left hand have different frame of references: in the right hand the palm is towards +z, while
		// in the left hand it is towards -z. The patches center is not in the middle of the palm along
		// the y axis, it is slighly moved torads the side opposite to the thumb
		const float palmDirection = m_isRight ? +1.0 : -1.0;
		const float triangleSide = m_handPalm->sideX() / 2.0f;
		const float triangleHalfSide = triangleSide / 2.0f;
		const float triangleHeight = sqrt((triangleSide * triangleSide) - (triangleHalfSide * triangleHalfSide));
		const float palmZ = palmDirection * m_handPalm->sideZ() / 2.0;
		const float palmCenterY = (m_handPalm->sideY() / 2.0) - triangleHeight * 1.1;
		Triangle t;
		m_patches.clear();

		t.a = wVector(0.0, palmCenterY, palmZ);
		t.b = wVector(triangleSide, palmCenterY, palmZ);
		t.c = wVector(triangleHalfSide, palmCenterY - triangleHeight, palmZ);
		m_patches.append(t);

		t.a = wVector(0.0, palmCenterY, palmZ);
		t.b = wVector(triangleSide, palmCenterY, palmZ);
		t.c = wVector(triangleHalfSide, palmCenterY + triangleHeight, palmZ);
		m_patches.append(t);

		t.a = wVector(0.0, palmCenterY, palmZ);
		t.b = wVector(triangleHalfSide, palmCenterY + triangleHeight, palmZ);
		t.c = wVector(-triangleHalfSide, palmCenterY + triangleHeight, palmZ);
		m_patches.append(t);

		t.a = wVector(0.0, palmCenterY, palmZ);
		t.b = wVector(-triangleSide, palmCenterY, palmZ);
		t.c = wVector(-triangleHalfSide, palmCenterY + triangleHeight, palmZ);
		m_patches.append(t);

		if (m_drawSensor) {
			// The graphical object sets the palm as its owner, which will take care of its destruction
			m_graphicalTouchSensor = new PalmPatchesTouchSensorGraphic(m_handPalm, m_patches, m_isRight);
		}
	} else if (resourceName == neuronsIteratorResource) {
		// Label prefix depending on the hand
		const QString lbl = m_isRight ? "R" : "L";

		EvonetIterator* evonetIt = getResource<EvonetIterator>();
		evonetIt->setCurrentBlock(name());
		// One neuron per palm patch, labelled p1..p4 (p stands for patch). Same
		// loop style as iCubHandJointsSensor::resourceChanged
		for (int i = 1; i <= 4; i++, evonetIt->nextNeuron()) {
			evonetIt->setGraphicProperties(lbl + "p" + QString::number(i), 0.0, 1.0, Qt::red);
		}
	} else if (resourceName == "objects") {
		m_objects = getResource<QVector<WObject*> >();
	} else if (resourceName == "world") {
		m_world = getResource<World>();
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
1755 
1756 bool iCubPalmPatchesTouchSensor::pointInPalmTriangle(const wVector& point, const Triangle& triangle) const
1757 {
1758  // Checking that the point is on the palm side of the hand
1759  if (((m_isRight) && (point.z < 0.0)) || ((!m_isRight) && (point.z > 0.0))) {
1760  return false;
1761  }
1762 
1763  // The algorithm used here uses Barycentric Coordinates to check if a point is inside a triangle or not.
1764  // You can find mode information at the following links:
1765  // http://en.wikipedia.org/wiki/Barycentric_coordinates_(mathematics)
1766  // http://www.blackpawn.com/texts/pointinpoly/default.html
1767  // The version implemented here is directly taken from the second link (an offline version is in the
1768  // documentation). We discard the z coordinate (and do computations in 2D) because the check on z has
1769  // already been done before
1770 
1771  // Compute vectors
1772  const float v0x = triangle.c.x - triangle.a.x;
1773  const float v0y = triangle.c.y - triangle.a.y;
1774  const float v1x = triangle.b.x - triangle.a.x;
1775  const float v1y = triangle.b.y - triangle.a.y;
1776  const float v2x = point.x - triangle.a.x;
1777  const float v2y = point.y - triangle.a.y;
1778 
1779  // Compute dot products
1780  const float dot00 = v0x * v0x + v0y * v0y;
1781  const float dot01 = v0x * v1x + v0y * v1y;
1782  const float dot02 = v0x * v2x + v0y * v2y;
1783  const float dot11 = v1x * v1x + v1y * v1y;
1784  const float dot12 = v1x * v2x + v1y * v2y;
1785 
1786  // Compute barycentric coordinates
1787  const float invDenom = 1.0 / (dot00 * dot11 - dot01 * dot01);
1788  const float u = (dot11 * dot02 - dot01 * dot12) * invDenom;
1789  const float v = (dot00 * dot12 - dot01 * dot02) * invDenom;
1790 
1791  // Check if point is in triangle
1792  return (u >= 0) && (v >= 0) && (u + v < 1);
1793 }
1794 
	// NOTE(review): the constructor signature (HandObjectVisualOffsetSensor's
	// constructor taking ConfigurationParameters and a prefix) is missing from
	// this listing; what follows is its member-initializer list and body.
	iCubSensor(params, prefix),
	m_icubHand("right"),
	m_world(NULL),
	m_eye(NULL),
	m_hand(NULL),
	m_object(NULL)
{
	// NOTE(review): unlike the other sensors in this file, the default is the
	// literal "right" rather than m_icubHand, and the value is not validated
	// (anything different from "left" selects the right hand in resourceChanged)
	m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", "right");

	// Declaring the resources that are needed here
	usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
}

// Destructor (signature missing from this listing); nothing to do
{
}
1812 
// HandObjectVisualOffsetSensor::save (signature missing from this listing):
// writes the sensor configuration back into params
{
	iCubSensor::save(params, prefix);

	params.startObjectParameters(prefix, "HandObjectVisualOffsetSensor", this);
	params.createParameter(prefix, "hand", m_icubHand);
}
1820 
// HandObjectVisualOffsetSensor::describe (signature missing from this listing):
// declares the sensor type and its configuration parameters
{
	iCubSensor::describe(type);
	Descriptor d = addTypeDescription(type, "Visual offset between the hand and the object", "This sensor computes the distance between the hand and the first object in the visual field of the robot. Returns the distances on the vertical and horizontal axes");
	d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
}
1827 
// HandObjectVisualOffsetSensor::update (signature missing from this listing):
// projects the hand and the first object on the robot retina and feeds the
// squashed horizontal (dx) and vertical (dy) offsets to the network
{
	// Checking all resources we need exist

	// Acquiring the lock to get resources
	ResourcesLocker locker(this);

	// We get this here because we are not notified if the vector changes (i.e. elemets are added or deleted),
	// only if the vector is replaced with another vector (and that doesn't happend)
	m_object = (*(getResource<QVector<WObject*> >("objects")))[0];

	// Setting the eye matrix in the projector
	Projector projector;
	projector.setEyeMatrix(m_eye->matrix());

	// Computing the projection of the object on the retina
	projector.set3DPointWorld(m_object->matrix().w_pos);
	const ImagePoint objPos = projector.getImagePoint01();

	// Computing the projection of the hand on the retina
	projector.set3DPointWorld(m_hand->matrix().w_pos);
	const ImagePoint handPos = projector.getImagePoint01();

	// The offsets are squashed with tanh to keep them in [-1, 1]. When either
	// projection is invalid the sensor reports no offset
	double dx, dy;
	if(objPos.isValid() && handPos.isValid()) {
		dx = objPos.x - handPos.x;
		dx = tanh(5*dx);

		dy = objPos.y - handPos.y;
		// BUGFIX: this used dx instead of dy, so the vertical offset neuron
		// received a copy of the horizontal offset
		dy = tanh(5*dy);
	} else {
		dx = 0;
		dy = 0;
	}

	EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
	evonetIt->setCurrentBlock(name());
	evonetIt->setInput(dx);
	evonetIt->nextNeuron();
	evonetIt->setInput(dy);
}
1870 
// HandObjectVisualOffsetSensor::size (signature missing from this listing):
// two neurons, the horizontal (dx) and vertical (dy) offsets
{
	return 2;
}
1875 
/**
 * \brief Refreshes cached pointers when one of the declared resources changes
 *
 * \param resourceName the name of the resource that changed
 * \param changeType the type of change (nothing is done on Deleted)
 */
void HandObjectVisualOffsetSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		// Caching the pointers to the eye piece and to the selected hand
		iCubRobot* const robot = getResource<iCubRobot>();
		m_eye = robot->headNeck()[4];
		m_hand = (m_icubHand == "left") ? robot->leftArm()[6] : robot->rightArm()[6];
	} else if (resourceName == neuronsIteratorResource) {
		// Setting labels and ranges for the two offset neurons
		EvonetIterator* const evonetIt = getResource<EvonetIterator>();
		evonetIt->setCurrentBlock(name());
		evonetIt->setGraphicProperties("dx", -1.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties("dy", -1.0, 1.0, Qt::red);
	} else if (resourceName == "objects") {
		// Nothing to do here, we get objects with getResource() in update()
	} else if (resourceName == "world") {
		m_world = getResource<World>();
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
1906 
1907 } // end namespace farsa
1908 
1909 #endif