icubsensors.cpp
/********************************************************************************
 *  FARSA Experiments Library                                                   *
 *  Copyright (C) 2007-2012                                                     *
 *  Gianluca Massera <emmegian@yahoo.it>                                        *
 *  Stefano Nolfi <stefano.nolfi@istc.cnr.it>                                   *
 *  Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it>                         *
 *  Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it>                           *
 *                                                                              *
 *  This program is free software; you can redistribute it and/or modify       *
 *  it under the terms of the GNU General Public License as published by       *
 *  the Free Software Foundation; either version 2 of the License, or          *
 *  (at your option) any later version.                                        *
 *                                                                              *
 *  This program is distributed in the hope that it will be useful,            *
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of             *
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the               *
 *  GNU General Public License for more details.                               *
 *                                                                              *
 *  You should have received a copy of the GNU General Public License          *
 *  along with this program; if not, write to the Free Software                *
 *  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA   *
 ********************************************************************************/

#ifdef FARSA_USE_YARP_AND_ICUB

#include "icubsensors.h"
#include "configurationhelper.h"
#include "motorcontrollers.h"
#include "logger.h"
#include "graphicalwobject.h"
#include <QStringList>

namespace farsa {

iCubSensor::iCubSensor(ConfigurationParameters& params, QString prefix) :
	Sensor(params, prefix),
	icubResource("robot"),
	neuronsIteratorResource("neuronsIterator")
{
	// Reading parameters
	icubResource = ConfigurationHelper::getString(params, prefix + "icub", icubResource);
	neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", neuronsIteratorResource);

	// Declaring the resources that are needed here
	usableResources(QStringList() << icubResource << neuronsIteratorResource);
}

iCubSensor::~iCubSensor()
{
	// Nothing to do here
}

void iCubSensor::save(ConfigurationParameters& params, QString prefix)
{
	// Calling parent function
	Sensor::save(params, prefix);

	// Saving parameters
	params.startObjectParameters(prefix, "iCubSensor", this);
	params.createParameter(prefix, "icub", icubResource);
	params.createParameter(prefix, "neuronsIterator", neuronsIteratorResource);
}

void iCubSensor::describe(QString type)
{
	// Calling parent function
	Sensor::describe(type);

	// Describing our parameters
	Descriptor d = addTypeDescription(type, "The base class for iCub sensors");
	d.describeString("icub").def("robot").help("the name of the resource associated with the iCub robot to use (default is \"robot\")");
	d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
}
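
/*
 * As an illustration of how the two parameters above are typically supplied,
 * a configuration fragment for a concrete subclass might look like the
 * following sketch (the group name and layout are hypothetical; only the
 * "icub" and "neuronsIterator" keys are defined by this class, "arm" by the
 * subclass):
 *
 *   [EXPERIMENT/SENSORS:0]
 *   type = iCubArmJointsSensor
 *   icub = robot
 *   neuronsIterator = neuronsIterator
 *   arm = left
 */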

void iCubSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	// Calling parent function
	Sensor::resourceChanged(resourceName, changeType);

	// Here we only check whether the resource has been deleted and reset the check flag;
	// the actual work is done in subclasses
	if (changeType == Deleted) {
		resetNeededResourcesCheck();
		return;
	}
}

iCubArmJointsSensor::iCubArmJointsSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix),
	icubMotors(NULL) {
	icubArm = ConfigurationHelper::getString( params, prefix+"arm", "right" );
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource );
}

iCubArmJointsSensor::~iCubArmJointsSensor() {
	/* nothing to do */
}

void iCubArmJointsSensor::save( ConfigurationParameters& params, QString prefix ) {
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubArmJointsSensor", this );
	params.createParameter( prefix, "arm", icubArm );
}

void iCubArmJointsSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub arm" );
	d.describeEnum( "arm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The arm from which the joint angles are read" );
}

void iCubArmJointsSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	QStringList values;
	for( int i=0; i<7; i++ ) {
		double value;
		icubMotors->getEncoder( i, &value );
		values << QString::number( value );
	}
	//exp->setStatus( QString("SENSOR Reading: <")+values.join(", ")+QString(">") );
	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
		double min, max, value;
		icubMotors->getEncoder(i, &value);
		icubMotors->getLimits(i,&min,&max);
		// normalize the motor values between 0 and 1
		evonetIt->setInput( linearMap(value,min,max,0,1) );
	}
}
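
/*
 * A worked example of the normalization above, assuming linearMap performs a
 * plain linear rescaling from [min, max] to [0, 1] (as its usage throughout
 * this file suggests): with joint limits min = -95 and max = 10 degrees, an
 * encoder reading of -42.5 maps to (-42.5 - (-95)) / (10 - (-95)) = 0.5,
 * i.e. the exact middle of the input range of the neuron.
 */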

int iCubArmJointsSensor::size() {
	return 7;
}

void iCubArmJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		iCubRobot* icub = getResource<iCubRobot>();
		if ( icubArm == "right" ) {
			icubMotors = icub->rightArmController();
		} else {
			icubMotors = icub->leftArmController();
		}
	} else if (resourceName == neuronsIteratorResource) {
		QString lbl;
		if ( icubArm == "right" ) {
			lbl = "R";
		} else {
			lbl = "L";
		}

		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
			evonetIt->setGraphicProperties( lbl + QString("a") + QString::number(i), 0.0, 1.0, Qt::red );
		}
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

ColorCameraSensor::ColorCameraSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix),
	nObjects(3)
{
	nObjects = ConfigurationHelper::getInt(params, prefix + "nObjects", nObjects);
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource << "objects" );
}

void ColorCameraSensor::save( ConfigurationParameters& params, QString prefix ) {
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "ColorCameraSensor", this );
	params.createParameter( prefix, "nObjects", QString::number(nObjects) );
}

void ColorCameraSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Color Camera Sensor" );
	d.describeInt( "nObjects" ).def( 3 ).help( "Number of objects whose retina position is encoded" );
}

// Updates the camera on the basis of nObjects objects (typically three,
// defined as red, green, and blue)
void ColorCameraSensor::update()
{
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	QVector<WObject*>& objects = *(getResource<QVector<WObject*> >( "objects" ));
	iCubRobot* icub = getResource<iCubRobot>(icubResource);
	// Setting the eye matrix in the projector
	m_projector.setEyeMatrix(icub->headNeck()[4]->matrix());

	NeuronsIterator* evonetIt = getResource<NeuronsIterator>(neuronsIteratorResource);
	evonetIt->setCurrentBlock( name() );
	// Activating the parts of the map for each object (red, green and blue)
	for(int i = 0; i < nObjects; i++) {
		// Checking we don't try to access nonexistent objects
		if (i >= objects.size()) {
			// Filling with 0.5
			// up-down
			evonetIt->setInput( 0.5 );
			evonetIt->nextNeuron();
			// right-left
			evonetIt->setInput( 0.5 );
			evonetIt->nextNeuron();
			continue;
		}

		// Computing the projection of the object on the retina
		m_projector.set3DPointWorld(objects[i]->matrix().w_pos);

		// If the object is within the retina, activating the map
		if (m_projector.pointInsideImage())
		{
			// mapPoint01.x and .y are the distances between the barycentre of the object
			// and the border of the field of view, normalized in the range [0.0, 1.0]
			const ImagePoint mapPoint01 = m_projector.getImagePoint01();

			// up-down
			evonetIt->setInput( 0.5 - mapPoint01.y );
			evonetIt->nextNeuron();
			// right-left
			evonetIt->setInput( 0.5 - mapPoint01.x );
			evonetIt->nextNeuron();

			// Storing the position on the retina of the current object
			m_objectsRetinaPosition.insert(objects[i], mapPoint01);
		} else {
			evonetIt->setInput( 0.0 );
			evonetIt->nextNeuron();
			evonetIt->setInput( 0.0 );
			evonetIt->nextNeuron();
			// Storing an invalid point for the current object
			m_objectsRetinaPosition.insert(objects[i], ImagePoint());
		}
	}
}
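
/*
 * Example of the retina encoding above: an object projected exactly at the
 * image centre has mapPoint01 = (0.5, 0.5), so both the up-down and the
 * right-left neurons receive 0.0; an object at the image corner with
 * mapPoint01 = (0.0, 0.0) drives both neurons to +0.5. Inputs therefore lie
 * in [-0.5, +0.5], with the sign encoding the side of the visual field on
 * which the object appears.
 */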

int ColorCameraSensor::size()
{
	return nObjects*2;
}

void ColorCameraSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		// Nothing to do here, we get the robot using getResource() in update()
	} else if (resourceName == neuronsIteratorResource) {
		QString lbl[2];
		lbl[0] = "H"; // horizontal
		lbl[1] = "V"; // vertical
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for (int obj = 0; obj < nObjects; obj++) {
			for (int i=0; i < 2; i++, evonetIt->nextNeuron()) {
				QString label;
				switch (obj) {
					case 0:
						label = QString("Cr") + lbl[i];
						break;
					case 1:
						label = QString("Cg") + lbl[i];
						break;
					case 2:
						label = QString("Cb") + lbl[i];
						break;
					default:
						label = QString::number(obj) + QString::number(i);
						break;
				}
				evonetIt->setGraphicProperties( label, -1.0, 1.0, Qt::red );
			}
		}
	} else if (resourceName == "objects") {
		// Nothing to do here, we get objects using getResource() in update()
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

// LinearCameraSensor implementation
LinearCameraSensor::LinearCameraSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix),
	nObjects(3),
	nReceptors(8),     // default assumed here; the original code read an uninitialized value
	projectionType(0)  // default assumed here; see the note below
{
	nObjects = ConfigurationHelper::getInt(params, prefix + "nObjects", nObjects);
	nReceptors = ConfigurationHelper::getInt(params, prefix + "nReceptors", nReceptors);
	// NOTE: the original code read this parameter with the "nReceptors" key, which made
	// projectionType impossible to configure; "projectionType" is the intended key
	projectionType = ConfigurationHelper::getInt(params, prefix + "projectionType", projectionType);
	receptors = new double[nReceptors];
	xcoors = new double[nObjects];
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource << "objects" );
}

void LinearCameraSensor::save( ConfigurationParameters& params, QString prefix ) {
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "LinearCameraSensor", this );
	params.createParameter( prefix, "nObjects", QString::number(nObjects) );
	// Also persisting nReceptors, which the constructor reads, so that save/load round-trips
	params.createParameter( prefix, "nReceptors", QString::number(nReceptors) );
}

void LinearCameraSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Linear Camera Sensor" );
	d.describeInt( "nObjects" ).def( 3 ).help( "Number of objects whose retina position is encoded" );
	d.describeInt( "nReceptors" ).help( "Number of receptors of the linear retina" );
}

// Updates the camera on the basis of the objects in the "objects" resource
// (typically three, defined as red, green, and blue)
void LinearCameraSensor::update()
{
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	QVector<WObject*>& objects = *(getResource<QVector<WObject*> >( "objects" ));
	iCubRobot* icub = getResource<iCubRobot>(icubResource);
	// Setting the eye matrix in the projector
	m_projector.setEyeMatrix(icub->headNeck()[4]->matrix());

	NeuronsIterator* evonetIt = getResource<NeuronsIterator>(neuronsIteratorResource);
	evonetIt->setCurrentBlock( name() );
	// Resetting the retina
	for (int i=0; i<nReceptors; i++) {
		receptors[i] = 0;
	}

	for(int i = 0; i < nObjects; i++) {
		// Checking we don't try to access nonexistent objects
		if (i >= objects.size()) {
			// Out of bounds
			continue;
		}

		// Computing the projection of the object on the retina
		m_projector.set3DPointWorld(objects[i]->matrix().w_pos);

		// If the object is within the retina, activating the corresponding receptor
		if (m_projector.pointInsideImage())
		{
			// mapPoint01.x and .y are the distances between the barycentre of the object
			// and the border of the field of view, normalized in the range [0.0, 1.0]
			const ImagePoint mapPoint01 = m_projector.getImagePoint01();

			int rind = (int)(mapPoint01.x * nReceptors);
			if (rind >= nReceptors) {
				// Guarding against mapPoint01.x == 1.0, which would index one past
				// the last receptor
				rind = nReceptors - 1;
			}
			receptors[rind] = 1.0;
			// Storing the position on the retina of the current object
			m_objectsRetinaPosition.insert(objects[i], mapPoint01);
		}
	}
	// Activating the retina
	for (int i=0; i<nReceptors; i++)
	{
		evonetIt->setInput( receptors[i] );
		evonetIt->nextNeuron();
	}
}
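
/*
 * Example of the receptor binning above: with nReceptors = 8, an object whose
 * normalized horizontal position is mapPoint01.x = 0.3 activates receptor
 * (int)(0.3 * 8) = 2. Without the clamp, mapPoint01.x == 1.0 would yield
 * index 8, one past the last receptor, hence the guard against an
 * out-of-bounds write.
 */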

int LinearCameraSensor::size()
{
	return nReceptors;
}

void LinearCameraSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		// Nothing to do here, we get the robot using getResource() in update()
	} else if (resourceName == neuronsIteratorResource) {
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );

		for (int i=0; i < nReceptors; i++, evonetIt->nextNeuron())
		{
			QString label = QString("r") + QString::number(i);
			evonetIt->setGraphicProperties( label, 0.0, 1.0, Qt::red );
		}
	} else if (resourceName == "objects") {
		// Nothing to do here, we get objects using getResource() in update()
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

// iCubPalmTargetDistSensor : begin implementation
// It returns the distance between the right or left palm and a given target
iCubPalmTargetDistSensor::iCubPalmTargetDistSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix) {
	icubPalm = ConfigurationHelper::getString( params, prefix+"palm", "right" );
	targetName = ConfigurationHelper::getString( params, prefix+"target", "target" );
	QVector<double> vec1 = ConfigurationHelper::getVector( params, prefix+"bbMin" );
	QVector<double> vec2 = ConfigurationHelper::getVector( params, prefix+"bbMax" );
	if ( vec1.size() == 3 && vec2.size() == 3 ) {
		linearize = true;
		bbMin = wVector( vec1[0], vec1[1], vec1[2] );
		bbMax = wVector( vec2[0], vec2[1], vec2[2] );
	} else {
		linearize = false;
		if ( ! (vec1.isEmpty() && vec2.isEmpty()) ) {
			Logger::warning( QString("iCubPalmTargetDistSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
		}
	}

	QVector<double> pal1 = ConfigurationHelper::getVector( params, prefix+"palmOffset" );
	if ( pal1.size() == 3 ) {
		addPalmOffset = true;
		palmOffset = wVector( pal1[0], pal1[1], pal1[2] );
	} else {
		addPalmOffset = false;
		if ( !pal1.isEmpty() ) {
			Logger::warning( QString("iCubPalmTargetDistSensor %1 - palmOffset parameter is not well specified; it will be ignored").arg(name()) );
		}
	}

	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource << targetName );
}

iCubPalmTargetDistSensor::~iCubPalmTargetDistSensor() {
	/* nothing to do */
}

void iCubPalmTargetDistSensor::save( ConfigurationParameters& params, QString prefix )
{
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubPalmTargetDistSensor", this );
	params.createParameter( prefix, "palm", icubPalm );
	params.createParameter( prefix, "target", targetName );
	if ( linearize ) {
		params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
		params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
	}
	if ( addPalmOffset ) {
		// Also persisting palmOffset, which the constructor reads, so that save/load round-trips
		params.createParameter( prefix, "palmOffset", QString("%1 %2 %3").arg(palmOffset[0]).arg(palmOffset[1]).arg(palmOffset[2]) );
	}
}

void iCubPalmTargetDistSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor for reading the distance between the right or left palm and a specified target" );
	d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the distance to the target is computed" );
	d.describeString( "target" ).def( "target" ).help( "The name of the resource associated with the target object" );
	d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used to linearize the object position into [-1,1]" );
	d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used to linearize the object position into [-1,1]" );
	d.describeReal( "palmOffset" ).props( IsList ).help( "The offset with respect to the palm at which the distance is computed" );
}

void iCubPalmTargetDistSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	iCubRobot* icub = getResource<iCubRobot>( icubResource );
	WObject* target = getResource<WObject>( targetName );
	wVector targetPosInICub = icub->matrix().untransformVector( target->matrix().w_pos );
	wVector palmPosInICub;
	if ( isLeft ) {
		wMatrix t2 = icub->leftArm()[6]->matrix();
		if ( addPalmOffset ) {
			t2.w_pos += t2.rotateVector( palmOffset );
		}
		palmPosInICub = icub->matrix().untransformVector( t2.w_pos );
	} else {
		wMatrix t2 = icub->rightArm()[6]->matrix();
		if ( addPalmOffset ) {
			t2.w_pos += t2.rotateVector( palmOffset );
		}
		palmPosInICub = icub->matrix().untransformVector( t2.w_pos );
	}

	wVector distanceVec = palmPosInICub - targetPosInICub;
	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	for( int i=0; i<3; i++ ) {
		if ( linearize ) {
			// Linearize into [-1,1]
			evonetIt->setInput( linearMap( distanceVec[i], bbMin[i], bbMax[i], -1, 1 ) );
		} else {
			evonetIt->setInput( distanceVec[i] );
		}
		evonetIt->nextNeuron();
	}
}
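
/*
 * A worked example of the linearization above: with bbMin[0] = -0.3 and
 * bbMax[0] = 0.3, a palm-target distance of 0.15 along x maps to
 * (0.15 - (-0.3)) / (0.3 - (-0.3)) * 2 - 1 = 0.5 on the corresponding input
 * neuron (assuming linearMap rescales linearly, as elsewhere in this file).
 */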

int iCubPalmTargetDistSensor::size() {
	return 3;
}

void iCubPalmTargetDistSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		// Nothing to do here, we get the robot using getResource() in update()
	} else if (resourceName == neuronsIteratorResource) {
		QString lbl;
		if ( icubPalm == "right" ) {
			lbl = "R";
			isLeft = false;
		} else {
			lbl = "L";
			isLeft = true;
		}

		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
			evonetIt->setGraphicProperties( lbl+QString("d")+QString::number(i), -1.0, 1.0, Qt::red );
		}
	} else if (resourceName == targetName) {
		// Nothing to do here, we get the target using getResource() in update()
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

// iCubPalmTargetDistSensor : end implementation

// iCubPalmTouchSensor begin implementation
iCubPalmTouchSensor::iCubPalmTouchSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix) {
	wPalm = NULL;
	objects = NULL;
	icubPalm = ConfigurationHelper::getString( params, prefix+"palm", "right" );
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
}

void iCubPalmTouchSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor for reading the right or left palm touch sensor" );
	d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the touch sensor is read" );
}

void iCubPalmTouchSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	double touch = 0.0;
	// We could cache this pointer in resourceChanged() instead of fetching it at every step
	World *world = getResource<World>("world");
	if (objects != NULL) {
		for (int i=0; i<objects->size(); i++)
		{
			if (world->checkContacts((PhyObject*)wPalm, (PhyObject*)objects->at(i))) {
				touch = 1.0;
			}
		}
	}
	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );

	for( int i=0; i<size(); i++, evonetIt->nextNeuron() ) {
		evonetIt->setInput(touch);
	}
}

int iCubPalmTouchSensor::size() {
	return 1;
}

iCubPalmTouchSensor::~iCubPalmTouchSensor()
{}

void iCubPalmTouchSensor::save( ConfigurationParameters& params, QString prefix )
{
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubPalmTouchSensor", this );
	params.createParameter( prefix, "palm", icubPalm );
}

void iCubPalmTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		iCubRobot *icub = getResource<iCubRobot>();

		if ( icubPalm == "right" ) {
			wPalm = icub->rightArm()[6];
		} else {
			wPalm = icub->leftArm()[6];
		}
	} else if (resourceName == "world") {
		// Nothing to do here
	} else if (resourceName == "objects") {
		objects = getResource<QVector<WObject*> >();
	} else if (resourceName == neuronsIteratorResource) {
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for( int i=0; i<1; i++, evonetIt->nextNeuron() ) {
			if (icubPalm == "right") {
				evonetIt->setGraphicProperties( QString("Rpt"), 0, 1, Qt::red );
			} else {
				evonetIt->setGraphicProperties( QString("Lpt"), 0, 1, Qt::red );
			}
		}
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

// iCubPalmTouchSensor end implementation

iCubHandTouchSensor::iCubHandTouchSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix),
	m_icubHand("right"),
	m_checkAllObjects(true),
	m_world(NULL),
	m_icubArm(),
	m_objects(),
	m_icub(NULL)
{
	m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", "right");
	m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", true);
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
}

iCubHandTouchSensor::~iCubHandTouchSensor()
{
}

void iCubHandTouchSensor::save(ConfigurationParameters& params, QString prefix)
{
	iCubSensor::save( params, prefix );
	params.startObjectParameters(prefix, "iCubHandTouchSensor", this);
	params.createParameter(prefix, "hand", m_icubHand);
	params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
}

void iCubHandTouchSensor::describe(QString type)
{
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription(type, "Hand touch sensor", "The touch sensor of the iCub hand. There are six sensors: one on the palm and one on each fingertip");
	d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose touch sensors are read. Choose between \"right\" and \"left\"");
	d.describeBool("checkAllObjects").def(true).help("Whether to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
}

void iCubHandTouchSensor::update()
{
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	m_objects = *(getResource<QVector<WObject*> >( "objects" ));

	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock(name());

	evonetIt->setInput(handPieceCollides(m_icubArm[6]));  // Palm
	evonetIt->nextNeuron();
	evonetIt->setInput(handPieceCollides(m_icubArm[19])); // Index
	evonetIt->nextNeuron();
	evonetIt->setInput(handPieceCollides(m_icubArm[20])); // Middle
	evonetIt->nextNeuron();
	evonetIt->setInput(handPieceCollides(m_icubArm[21])); // Ring
	evonetIt->nextNeuron();
	evonetIt->setInput(handPieceCollides(m_icubArm[22])); // Little
	evonetIt->nextNeuron();
	evonetIt->setInput(handPieceCollides(m_icubArm[26])); // Thumb
	evonetIt->nextNeuron();
}

int iCubHandTouchSensor::size()
{
	return 6;
}

void iCubHandTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		m_icub = getResource<iCubRobot>();
		if (m_icubHand == "left") {
			m_icubArm = m_icub->leftArm();
		} else {
			m_icubArm = m_icub->rightArm();
		}
	} else if (resourceName == "world") {
		m_world = getResource<World>();
	} else if (resourceName == "objects") {
		// Nothing to do here, we get objects using getResource() in update()
	} else if (resourceName == neuronsIteratorResource) {
		QString lbl;
		if (m_icubHand == "left") {
			lbl = "L";
		} else {
			lbl = "R";
		}

		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock(name());
		evonetIt->setGraphicProperties(lbl + "pt", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f1", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f2", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f3", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f4", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f5", 0.0, 1.0, Qt::red);
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

double iCubHandTouchSensor::handPieceCollides(PhyObject* handPiece)
{
	if (m_icub->isKinematic()) {
		for (int i = 0; i < m_objects.size(); i++) {
			PhyObject* obj = dynamic_cast<PhyObject*>(m_objects[i]);
			if ((obj != NULL) && (m_world->checkContacts(handPiece, obj))) {
				return 1.0;
			}
		}

		return 0.0;
	} else {
		// Taking the vector of contacts. If no contact is present, this returns an empty vector
		const contactVec& c = m_world->contacts()[handPiece];

		if (c.size() == 0) {
			return 0.0;
		} else if (m_checkAllObjects) {
			return 1.0;
		} else {
			for (int i = 0; i < m_objects.size(); i++) {
				for (int j = 0; j < c.size(); j++) {
					if (c[j].collide == m_objects[i]) {
						return 1.0;
					}
				}
			}

			return 0.0;
		}
	}
}
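
/*
 * Design note on the two branches above: when the iCub is kinematic the
 * physics engine does not resolve collisions for its bodies, so contacts are
 * tested explicitly against the objects vector; in the dynamic case the
 * sensor simply inspects the contacts map filled by the engine during the
 * last world step.
 */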

// iCubTorsoJointsSensor begin implementation
iCubTorsoJointsSensor::iCubTorsoJointsSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix) {
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource );
}

void iCubTorsoJointsSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor for reading the torso joint angles (rotation and flexion)" );
}

void iCubTorsoJointsSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	double minRot, maxRot;
	double minFlex, maxFlex;
	double curRot;
	double curFlex;

	icubMotors->getLimits(0, &minRot, &maxRot);
	icubMotors->getLimits(2, &minFlex, &maxFlex);
	icubMotors->getEncoder(0, &curRot);
	icubMotors->getEncoder(2, &curFlex);

	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	evonetIt->setInput(((curRot - minRot) / (maxRot - minRot)) * 2.0 - 1.0);
	evonetIt->nextNeuron();
	evonetIt->setInput((curFlex - minFlex) / (maxFlex - minFlex));
}
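
/*
 * Note on the encoding above: the torso rotation is rescaled to [-1, 1]
 * (0 corresponds to the centre of the joint range) while the flexion is
 * rescaled to [0, 1]. For example, with rotation limits [-50, 50] degrees,
 * a reading of 25 becomes (25 - (-50)) / 100 * 2 - 1 = 0.5.
 */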

int iCubTorsoJointsSensor::size() {
	return 2;
}

void iCubTorsoJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		iCubRobot* icub = getResource<iCubRobot>();
		icubMotors = icub->torsoController();
	} else if (resourceName == neuronsIteratorResource) {
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );

		evonetIt->setGraphicProperties( QString("t0"), 0, 1, Qt::red ); // rotation
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties( QString("t1"), 0, 1, Qt::red ); // flexion
		evonetIt->nextNeuron();
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

iCubTorsoJointsSensor::~iCubTorsoJointsSensor()
{}

void iCubTorsoJointsSensor::save( ConfigurationParameters& params, QString prefix )
{
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubTorsoJointsSensor", this );
}
// iCubTorsoJointsSensor end implementation

// iCubHeadJointsSensor begin implementation
iCubHeadJointsSensor::iCubHeadJointsSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix) {
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource );
}

iCubHeadJointsSensor::~iCubHeadJointsSensor()
{}

void iCubHeadJointsSensor::save( ConfigurationParameters& params, QString prefix )
{
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubHeadJointsSensor", this );
}

void iCubHeadJointsSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor for reading the head and neck joint angles" );
}

void iCubHeadJointsSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	double minRot, maxRot;
	double minFlex, maxFlex;
	double curRot;
	double curFlex;

	icubMotors->getLimits(0, &minRot, &maxRot);
	icubMotors->getLimits(2, &minFlex, &maxFlex);
	icubMotors->getEncoder(0, &curRot);
	icubMotors->getEncoder(2, &curFlex);

	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	evonetIt->setInput((curRot - minRot) / (maxRot - minRot));
	evonetIt->nextNeuron();
	evonetIt->setInput((curFlex - minFlex) / (maxFlex - minFlex));
}

int iCubHeadJointsSensor::size() {
	return 2;
}

void iCubHeadJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		iCubRobot* icub = getResource<iCubRobot>();
		icubMotors = icub->headNeckController();
	} else if (resourceName == neuronsIteratorResource) {
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );

		evonetIt->setGraphicProperties( QString("n0"), 0, 1, Qt::red ); // n stands for neck
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties( QString("n1"), 0, 1, Qt::red );
		evonetIt->nextNeuron();
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

// iCubHeadJointsSensor end implementation

// iCubPCHeadJointsSensor begin implementation
iCubPCHeadJointsSensor::iCubPCHeadJointsSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix) {

	// A default of 1 neuron per joint is assumed here; the original code read
	// this parameter using an uninitialized value as its default
	nNeurons = ConfigurationHelper::getInt(params, prefix+"nNeurons", 1);

	headDofs[0] = !ConfigurationHelper::getBool(params, prefix+"disableNeckPitch", false);
	headDofs[1] = !ConfigurationHelper::getBool(params, prefix+"disableNeckRoll", false);
	headDofs[2] = !ConfigurationHelper::getBool(params, prefix+"disableNeckYaw", false);
	headDofs[3] = !ConfigurationHelper::getBool(params, prefix+"disableEyesTilt", false);
	headDofs[4] = !ConfigurationHelper::getBool(params, prefix+"disableEyesVersion", false);
	headDofs[5] = !ConfigurationHelper::getBool(params, prefix+"disableEyesVergence", false);

	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource );
}

iCubPCHeadJointsSensor::~iCubPCHeadJointsSensor()
{}

void iCubPCHeadJointsSensor::save( ConfigurationParameters& params, QString prefix )
{
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubPCHeadJointsSensor", this );
	if (!headDofs[0])
		params.createParameter(prefix, "disableNeckPitch", "true");
	if (!headDofs[1])
		params.createParameter(prefix, "disableNeckRoll", "true");
	if (!headDofs[2])
		params.createParameter(prefix, "disableNeckYaw", "true");
	if (!headDofs[3])
		params.createParameter(prefix, "disableEyesTilt", "true");
	if (!headDofs[4])
		params.createParameter(prefix, "disableEyesVersion", "true");
	if (!headDofs[5])
		params.createParameter(prefix, "disableEyesVergence", "true");
	// Also persisting nNeurons, which the constructor reads, so that save/load round-trips
	params.createParameter(prefix, "nNeurons", QString::number(nNeurons));
}

void iCubPCHeadJointsSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Place-code sensor for reading the head joint angles" );
	d.describeInt("nNeurons").help("Number of neurons used to encode each enabled joint");
	d.describeBool("disableNeckPitch").def(false).help("Disables proprioception of the #0 joint of the head: neck pitch");
	d.describeBool("disableNeckRoll").def(false).help("Disables proprioception of the #1 joint of the head: neck roll");
	d.describeBool("disableNeckYaw").def(false).help("Disables proprioception of the #2 joint of the head: neck yaw");
	d.describeBool("disableEyesTilt").def(false).help("Disables proprioception of the #3 joint of the head: eyes tilt");
	d.describeBool("disableEyesVersion").def(false).help("Disables proprioception of the #4 joint of the head: eyes version");
	d.describeBool("disableEyesVergence").def(false).help("Disables proprioception of the #5 joint of the head: eyes vergence");
}

void iCubPCHeadJointsSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	double minv, maxv;
	double value;

	// Reading and normalizing the encoder values of the six head joints
	for (int i=0; i<6; i++)
	{
		icubMotors->getLimits(i, &minv, &maxv);
		icubMotors->getEncoder(i, &value);
		headEncoderValues[i] = (value - minv) / (maxv - minv);
	}
	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	for (int i=0; i<6; i++)
	{
		if (headDofs[i]) {
			for (int n=0; n<nNeurons; n++)
			{
				evonetIt->setInput(neuronActivation(n, i));
				evonetIt->nextNeuron();
			}
		}
	}
}

double iCubPCHeadJointsSensor::neuronActivation(int n, int j)
{
	const int ind = (int)(headEncoderValues[j] * nNeurons);
	if (n == ind) {
		return 1.0;
	} else {
		return 0.0;
	}
}
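
/*
 * Example of the place code above: with nNeurons = 4 and a normalized joint
 * value of 0.6, ind = (int)(0.6 * 4) = 2, so only the third neuron of the
 * group is set to 1.0 while the others stay at 0.0. Note that a value of
 * exactly 1.0 yields ind = nNeurons, which activates no neuron; clamping ind
 * to nNeurons - 1 would be needed if a joint can reach its upper limit
 * exactly.
 */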

int iCubPCHeadJointsSensor::size() {
	int n = 0;
	for (int i=0; i<6; i++) n += headDofs[i];
	return n * nNeurons;
}

void iCubPCHeadJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		iCubRobot* icub = getResource<iCubRobot>();
		icubMotors = icub->headNeckController();
	} else if (resourceName == neuronsIteratorResource) {
		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for (int i=0; i<6; i++)
		{
			if (headDofs[i]) {
				for (int n=0; n<nNeurons; n++)
				{
					evonetIt->setGraphicProperties( QString("n")+QString::number(i)+QString("-")+QString::number(n), 0, 1, Qt::red ); // n stands for neck
					evonetIt->nextNeuron();
				}
			}
		}
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
// end iCubPCHeadJointsSensor

// iCubHandJointsSensor begin implementation
iCubHandJointsSensor::iCubHandJointsSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix),
	icubMotors(NULL) {
	icubHand = ConfigurationHelper::getString( params, prefix+"hand", "right" );
	// Declaring the resources that are needed here
	usableResources( QStringList() << icubResource << neuronsIteratorResource );
}

iCubHandJointsSensor::~iCubHandJointsSensor() {
	/* nothing to do */
}

void iCubHandJointsSensor::save( ConfigurationParameters& params, QString prefix ) {
	iCubSensor::save( params, prefix );
	params.startObjectParameters( prefix, "iCubHandJointsSensor", this );
	params.createParameter( prefix, "hand", icubHand );
}

void iCubHandJointsSensor::describe( QString type ) {
	iCubSensor::describe( type );
	Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub hand" );
	d.describeEnum( "hand" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The hand from which the joint angles are read" );
}

void iCubHandJointsSensor::update() {
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker( this );

	NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
	evonetIt->setCurrentBlock( name() );
	// Hand joints are read from indexes 9 to 15 of the arm controller
	for( int i=9; i<16; i++, evonetIt->nextNeuron() ) {
		double min, max, value;
		icubMotors->getEncoder(i, &value);
		icubMotors->getLimits(i,&min,&max);
		// normalize the motor values between 0 and 1
		evonetIt->setInput( linearMap(value,min,max,0,1) );
	}
}

int iCubHandJointsSensor::size() {
	return 7;
}

void iCubHandJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		iCubRobot* icub = getResource<iCubRobot>();
		if ( icubHand == "right" ) {
			icubMotors = icub->rightArmController();
		} else {
			icubMotors = icub->leftArmController();
		}
	} else if (resourceName == neuronsIteratorResource) {
		QString label;
		if ( icubHand == "right" ) {
			label = "R";
		} else {
			label = "L";
		}

		NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
		evonetIt->setCurrentBlock( name() );
		for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
			evonetIt->setGraphicProperties( label+QString("f")+QString::number(i), 0.0, 1.0, Qt::red ); // f stands for fingers
		}
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}
// end iCubHandJointsSensor

namespace __PalmAndFingertipTouchSensor_internal {
	#ifndef GLMultMatrix
	#define GLMultMatrix glMultMatrixf
	// use glMultMatrixd when real is double
	#endif

	/**
	 * \brief The small offset used to draw the sensor surface slightly outside
	 *        the hand piece, to avoid z-fighting
	 */
	const float epsilon = 0.0001f;

	/**
	 * \brief The number of segments used to approximate a 90 degrees arc when
	 *        drawing the sensor surface
	 */
	const float numDivisionsFor90Degrees = 5.0f;

	/**
	 * \brief The maximum number of contact points requested when checking
	 *        collisions
	 */
	const int maxNumContacts = 20;

	/**
	 * \brief A graphical object displaying the area covered by a fingertip
	 *        touch sensor
	 */
	class FingertipTouchSensorGraphic : public GraphicalWObject
	{
	public:
		/**
		 * \brief Constructor
		 *
		 * \param handPiece the fingertip on which the sensor is drawn (must be
		 *                  a PhyCylinder)
		 * \param alpha the aperture of the sensor surface
		 * \param h the height of the sensor surface
		 * \param isRight whether the hand is the right one
		 * \param isThumb whether the finger is the thumb
		 * \param name the name of this object
		 */
		FingertipTouchSensorGraphic(PhyObject *handPiece, double alpha, double h, bool isRight, bool isThumb, QString name = "unamed") :
			GraphicalWObject(handPiece->world(), name),
			m_handPiece(dynamic_cast<PhyCylinder*>(handPiece)),
			m_handPieceHeight(m_handPiece->height()),
			m_handPieceRadius(m_handPiece->radius()),
			m_alpha(alpha),
			m_h(h),
			m_alphaOffset(computeAlphaOffset(isRight, isThumb)),
			m_angularIncrement(m_alpha / (ceil(m_alpha / (M_PI / 2.0)) * numDivisionsFor90Degrees)),
			m_startingAngle(-(m_alpha / 2.0) + m_alphaOffset),
			m_endingAngle((m_alpha / 2.0) + m_alphaOffset),
			m_isActive(false),
			m_isActiveMutex()
		{
			// Attaching to handPiece (which also becomes our owner)
			attachToObject(m_handPiece, true);

			// We also use our own color and texture
			setUseColorTextureOfOwner(false);
			setTexture("");
			setColor(Qt::cyan);
		}

		/**
		 * \brief Destructor
		 */
		~FingertipTouchSensorGraphic()
		{
		}

		/**
		 * \brief Sets whether the sensor is currently touching something
		 *        (touched sensors are drawn in a different color)
		 *
		 * \param isActive the new activation status
		 */
		void setActive(bool isActive)
		{
			m_isActiveMutex.lock();
			m_isActive = isActive;
			m_isActiveMutex.unlock();
		}

	protected:
		/**
		 * \brief Draws the sensor surface on the fingertip
		 *
		 * \param renderer the object performing the rendering
		 * \param gw the OpenGL context
		 */
		virtual void render(RenderWObject* renderer, QGLContext* gw)
		{
			// First of all changing our color depending on the value of m_isActive
			m_isActiveMutex.lock();
			if (m_isActive) {
				setColor(Qt::red);
			} else {
				setColor(Qt::cyan);
			}
			m_isActiveMutex.unlock();

			// Bringing the coordinate system on the fingertip
			wMatrix mtr = tm;
			mtr.w_pos += mtr.x_ax.scale(m_handPieceHeight / 2.0);

			glPushMatrix();
			renderer->container()->setupColorTexture(gw, renderer);
			GLMultMatrix(&mtr[0][0]);

			// Drawing the top part of the sensor
			glBegin(GL_TRIANGLES);

			// All normals here are along the x axis. All triangles have a vertex on
			// the axis of the cylinder
			const float adjustedRadius = m_handPieceRadius + epsilon;
			for (float angle = m_startingAngle; angle < m_endingAngle; angle += m_angularIncrement) {
				// Computing the next angle (we have to do this to avoid numerical errors)
				const float nextAngle = angle + m_angularIncrement;
				const float effectiveNextAngle = ((nextAngle > m_endingAngle) ? m_endingAngle : nextAngle);
				glNormal3f(1.0, 0.0, 0.0);
				glVertex3f(epsilon, 0.0, 0.0);
				glVertex3f(epsilon, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
				glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
			}
			glEnd();

			// Now drawing the remaining part
			glBegin(GL_QUADS);

			// Here we have to compute the right normal for each face
			for (float angle = m_startingAngle; angle < m_endingAngle; angle += m_angularIncrement) {
				// Computing the next angle (we have to do this to avoid numerical errors)
				const float nextAngle = angle + m_angularIncrement;
				const float effectiveNextAngle = ((nextAngle > m_endingAngle) ? m_endingAngle : nextAngle);
				// To compute the normal we take two vectors along two adjacent sides of the quad, compute the cross
				// product and then normalize it (the product order is important, of course)
				const wVector v1(0.0, sin(angle) - sin(angle + m_angularIncrement), cos(angle) - cos(angle + m_angularIncrement));
				const wVector v2(1.0, 0.0, 0.0);
				const wVector normal = (v1 * v2).normalize();
				glNormal3f(normal.x, normal.y, normal.z);

				glVertex3f(epsilon, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
				glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
				glVertex3f(-m_h, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
				glVertex3f(-m_h, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
			}
			glEnd();
			glPopMatrix();
		}

		static float computeAlphaOffset(bool isRight, bool isThumb) {
			// The angle we use when drawing is relative to the z axis on the yz plane. Here we compute
			// the offsets needed in the different cases (see the comment in
			// iCubFingertipsTouchSensor::goodCollisionPoint for more information about the frames of
			// reference of the various fingers in the two hands)
			float offset = 0.0;
			if (isRight) {
				offset = isThumb ? M_PI / 2.0 : 0.0;
			} else {
				offset = isThumb ? M_PI / 2.0 : M_PI;
			}

			return offset;
		}

		/**
		 * \brief The fingertip on which the sensor is drawn
		 */
		PhyCylinder* const m_handPiece;

		/**
		 * \brief The height of the fingertip cylinder
		 */
		const real m_handPieceHeight;

		/**
		 * \brief The radius of the fingertip cylinder
		 */
		const real m_handPieceRadius;

		/**
		 * \brief The aperture of the sensor surface
		 */
		const float m_alpha;

		/**
		 * \brief The height of the sensor surface
		 */
		const float m_h;

		/**
		 * \brief The angular offset of the sensor surface, depending on the
		 *        hand and finger (see computeAlphaOffset())
		 */
		const float m_alphaOffset;

		/**
		 * \brief The angular increment used when drawing the sensor surface
		 */
		const float m_angularIncrement;

		/**
		 * \brief The angle at which the sensor surface starts
		 */
		const float m_startingAngle;

		/**
		 * \brief The angle at which the sensor surface ends
		 */
		const float m_endingAngle;

		/**
		 * \brief Whether the sensor is currently touching something
		 */
		bool m_isActive;

		/**
		 * \brief The mutex protecting m_isActive
		 */
		QMutex m_isActiveMutex;
	};

	/**
	 * \brief A graphical object displaying the patches of the palm touch
	 *        sensor
	 */
	class PalmPatchesTouchSensorGraphic : public GraphicalWObject
	{
	public:
		/**
		 * \brief Constructor
		 *
		 * \param handPalm the palm on which the patches are drawn (must be a
		 *                 PhyBox)
		 * \param patches the list of triangular patches making up the sensor
		 * \param isRight whether the hand is the right one
		 * \param name the name of this object
		 */
		PalmPatchesTouchSensorGraphic(PhyObject *handPalm, const QVector<iCubPalmPatchesTouchSensor::Triangle>& patches, bool isRight, QString name = "unamed") :
			GraphicalWObject(handPalm->world(), name),
			m_handPalm(dynamic_cast<PhyBox*>(handPalm)),
			m_patches(patches),
			m_isRight(isRight),
			m_zAxisDirection(isRight ? 1.0 : -1.0),
			m_activations(m_patches.size(), false),
			m_activationsMutex()
		{
			// Attaching to handPalm (which also becomes our owner)
			attachToObject(m_handPalm, true);

			// We also use our own color and texture
			setUseColorTextureOfOwner(false);
			setTexture("");
			setColor(Qt::cyan);
		}

		/**
		 * \brief Destructor
		 */
		~PalmPatchesTouchSensorGraphic()
		{
		}

		/**
		 * \brief Sets which patches are currently touching something
		 *        (touched patches are drawn in a different color)
		 *
		 * \param activations the activation status of each patch
		 */
		void setActivations(const QVector<bool> activations)
		{
			m_activationsMutex.lock();
			m_activations = activations;
			m_activationsMutex.unlock();
		}

	protected:
		/**
		 * \brief Draws the patches of the palm touch sensor
		 *
		 * \param renderer the object performing the rendering
		 * \param gw the OpenGL context
		 */
		virtual void render(RenderWObject* renderer, QGLContext* gw)
		{
			// Copying the m_activations vector to a local vector to avoid concurrent accesses
			m_activationsMutex.lock();
			const QVector<bool> activations(m_activations);
			m_activationsMutex.unlock();

			// We receive the list of triangles from the sensor, we just need to display them
			glPushMatrix();
			renderer->container()->setupColorTexture(gw, renderer);
			GLMultMatrix(&tm[0][0]);

			// First drawing the triangles making up the sensor
			glBegin(GL_TRIANGLES);
			glNormal3f(0.0, 0.0, m_zAxisDirection);
			for (int i = 0; i < m_patches.size(); i++) {
				const iCubPalmPatchesTouchSensor::Triangle& t = m_patches[i];

				QColor col;
				if (activations[i]) {
					col = Qt::red;
				} else {
					col = Qt::cyan;
				}
				glColor4f(col.redF(), col.greenF(), col.blueF(), col.alphaF());

				glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
				glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
				glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
			}
			glEnd();

			// Now drawing the lines separating the triangles. With this loop some
			// lines are drawn twice; that is not a problem
			glBegin(GL_LINES);
			glNormal3f(0.0, 0.0, m_zAxisDirection);
			glColor4f(0.0, 0.0, 0.0, 1.0);
			for (int i = 0; i < m_patches.size(); i++) {
				const iCubPalmPatchesTouchSensor::Triangle& t = m_patches[i];

				glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
				glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);

				glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
				glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);

				glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
				glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
			}
			glEnd();
			glPopMatrix();
		}

		/**
		 * \brief The palm box on which the patches are drawn
		 */
		PhyBox* const m_handPalm;

		/**
		 * \brief The list of triangular patches making up the sensor
		 */
		const QVector<iCubPalmPatchesTouchSensor::Triangle> m_patches;

		/**
		 * \brief Whether the hand is the right one
		 */
		const bool m_isRight;

		/**
		 * \brief The direction of the z axis (+1 for the right hand, -1 for
		 *        the left one)
		 */
		const float m_zAxisDirection;

		/**
		 * \brief The activation status of each patch
		 */
		QVector<bool> m_activations;

		/**
		 * \brief The mutex protecting m_activations
		 */
		QMutex m_activationsMutex;
	};
}

using namespace __PalmAndFingertipTouchSensor_internal;

iCubFingertipsTouchSensor::iCubFingertipsTouchSensor(ConfigurationParameters& params, QString prefix) :
	iCubSensor(params, prefix),
	m_icubHand("right"),
	m_isRight(true),
	m_checkAllObjects(true),
	m_alpha(M_PI / 4.0), // 45°
	m_h(0.01),
	m_drawSensor(true),
	m_world(NULL),
	m_icubArm(),
	m_objects(NULL),
	m_icub(NULL),
	m_graphicalTouchSensors()
{
	m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", m_icubHand);
	if (m_icubHand.toLower() == "right") {
		m_isRight = true;
	} else if (m_icubHand.toLower() == "left") {
		m_isRight = false;
	} else {
		ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
	}
	m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
	m_alpha = toRad(ConfigurationHelper::getDouble(params, prefix + "alpha", toDegree(m_alpha)));
	m_h = ConfigurationHelper::getDouble(params, prefix + "h", m_h);
	m_drawSensor = ConfigurationHelper::getBool(params, prefix + "drawSensor", m_drawSensor);

	// Declaring the resources that are needed here
	usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
}

iCubFingertipsTouchSensor::~iCubFingertipsTouchSensor()
{
	// Nothing to do here, renderers are destroyed by their owners
}

void iCubFingertipsTouchSensor::save(ConfigurationParameters& params, QString prefix)
{
	iCubSensor::save(params, prefix);
	params.startObjectParameters(prefix, "iCubFingertipsTouchSensor", this);
	params.createParameter(prefix, "hand", m_icubHand);
	params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
	params.createParameter(prefix, "alpha", QString::number(toDegree(m_alpha)));
	params.createParameter(prefix, "h", QString::number(m_h));
	params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
}

void iCubFingertipsTouchSensor::describe(QString type)
{
	iCubSensor::describe(type);
	Descriptor d = addTypeDescription(type, "Hand fingertips touch sensor", "The touch sensor of the iCub fingertips. There are five sensors, one for each fingertip");
	d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose touch sensors are read. Choose between \"right\" and \"left\"");
	d.describeBool("checkAllObjects").def(true).help("Whether to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
	d.describeReal("alpha").def(45.0).help("The aperture of the sensor surface", "The aperture angle of the sensor surface in degrees (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
	d.describeReal("h").def(0.01).help("The height of the sensor surface", "The height of the sensor surface (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
	d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true, areas corresponding to the touch sensor surface are drawn on the fingertips when doing graphical simulations");
}

void iCubFingertipsTouchSensor::update()
{
	// Checking all resources we need exist
	checkAllNeededResourcesExist();

	// Acquiring the lock to get resources
	ResourcesLocker locker(this);

	EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
	evonetIt->setCurrentBlock(name());

	// These are the indexes of fingertips in the vector of icub arm parts
	static const unsigned int indexes[] = {19, 20, 21, 22, 26};
	for (unsigned int i = 0; i < 5; i++) {
		// The thumb is the last index (26)
		const double collision = handPieceCollides(m_icubArm[indexes[i]], (i == 4) ? true : false);
		// If sensors are also drawn, we change the color of the sensor depending on whether it
		// collides with an object or not
		if (m_drawSensor) {
			m_graphicalTouchSensors[i]->setActive((collision > 0.5));
		}
		evonetIt->setInput(collision);
		evonetIt->nextNeuron();
	}
}

int iCubFingertipsTouchSensor::size()
{
	return 5;
}

void iCubFingertipsTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
	iCubSensor::resourceChanged(resourceName, changeType);

	if (changeType == Deleted) {
		return;
	}

	if (resourceName == icubResource) {
		m_icub = getResource<iCubRobot>();
		if (m_isRight) {
			m_icubArm = m_icub->rightArm();
		} else {
			m_icubArm = m_icub->leftArm();
		}

		// Checking if we have to draw the sensors. This is here because it requires a pointer to icub parts
		if (m_drawSensor) {
			m_graphicalTouchSensors.clear();

			// Creating graphical objects representing the touch sensor areas. They set the finger piece as
			// their owner, so that their destruction is handled by the finger pieces themselves
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[19], m_alpha, m_h, m_isRight, false)); // Index
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[20], m_alpha, m_h, m_isRight, false)); // Middle
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[21], m_alpha, m_h, m_isRight, false)); // Ring
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[22], m_alpha, m_h, m_isRight, false)); // Little
			m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[26], m_alpha, m_h, m_isRight, true)); // Thumb
		}
	} else if (resourceName == neuronsIteratorResource) {
		QString lbl;
		if (m_isRight) {
			lbl = "R";
		} else {
			lbl = "L";
		}

		EvonetIterator* evonetIt = getResource<EvonetIterator>();
		evonetIt->setCurrentBlock(name());
		evonetIt->setGraphicProperties(lbl + "f1", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f2", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f3", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f4", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
		evonetIt->setGraphicProperties(lbl + "f5", 0.0, 1.0, Qt::red);
		evonetIt->nextNeuron();
	} else if (resourceName == "objects") {
		m_objects = getResource<QVector<WObject*> >();
	} else if (resourceName == "world") {
		m_world = getResource<World>();
	} else {
		Logger::info("Unknown resource " + resourceName + " for " + name());
	}
}

double iCubFingertipsTouchSensor::handPieceCollides(PhyObject* handPiece, bool isThumb) const
{
	if (m_icub->isKinematic() || !m_checkAllObjects) {
		for (int i = 0; i < m_objects->size(); i++) {
			PhyObject* obj = dynamic_cast<PhyObject*>(m_objects->at(i));
			QVector<wVector> contacts;
			if ((obj != NULL) && (m_world->smartCheckContacts(handPiece, obj, maxNumContacts, &contacts))) {
				for (int j = 0; j < contacts.size(); j++) {
					if (goodCollisionPoint(handPiece, contacts[j], isThumb)) {
						return 1.0;
					}
				}
			}
		}

		return 0.0;
	} else {
		// Taking the vector of contacts. If no contact is present, this returns an empty vector
		const contactVec& c = m_world->contacts()[handPiece];

		for (int i = 0; i < c.size(); i++) {
			if (goodCollisionPoint(handPiece, c[i].pos, isThumb)) {
				return 1.0;
			}
		}

		return 0.0;
	}
}

bool iCubFingertipsTouchSensor::goodCollisionPoint(PhyObject* handPiece, const wVector& collisionPoint, bool isThumb) const
{
	// The various fingertips have frames of reference with different orientations, so the direction towards
	// the palm (i.e. where the touch sensor area lies) is along different axes:
	//   - right hand:
	//     - thumb: +y axis
	//     - all other fingers: +z axis
	//   - left hand:
	//     - thumb: +y axis
	//     - all other fingers: -z axis
	// Here we calculate the angle on the yz plane, but the 0 angle is on different axes depending on the
	// hand piece, as in the list above

	float angle;
	if (isThumb) {
		angle = atan2(collisionPoint.z, collisionPoint.y);
	} else {
		if (m_isRight) {
			angle = atan2(collisionPoint.y, collisionPoint.z);
		} else {
			angle = atan2(collisionPoint.y, -collisionPoint.z);
		}
	}
	// Also computing the distance from the fingertip (to ease the check below)
	const float distFromFingertip = (dynamic_cast<PhyCylinder*>(handPiece))->height() / 2.0 - collisionPoint.x;

	// Checking if the collision point is good
	if ((angle >= -m_alpha) && (angle <= m_alpha) && (distFromFingertip <= m_h)) {
		return true;
	}

	return false;
}
1830 
1831 iCubPalmPatchesTouchSensor::iCubPalmPatchesTouchSensor(ConfigurationParameters& params, QString prefix) :
1832  iCubSensor(params, prefix),
1833  m_icubHand("right"),
1834  m_isRight(true),
1835  m_checkAllObjects(true),
1836  m_drawSensor(true),
1837  m_world(NULL),
1838  m_icubArm(),
1839  m_objects(NULL),
1840  m_icub(NULL),
1841  m_handPalm(NULL),
1842  m_patches(),
1843  m_graphicalTouchSensor(NULL)
1844 {
1845  m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", m_icubHand);
1846  if (m_icubHand.toLower() == "right") {
1847  m_isRight = true;
1848  } else if (m_icubHand.toLower() == "left") {
1849  m_isRight = false;
1850  } else {
1851  ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
1852  }
1853  m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
1854  m_drawSensor = ConfigurationHelper::getBool(params, prefix + "drawSensor", m_drawSensor);
1855 
1856  // Declaring the resources that are needed here
1857  usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
1858 }
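
// A hypothetical configuration fragment for this sensor (the group name and its position in the
// file are illustrative only; the parameter names are the ones handled by save() and describe()
// below):
//
//   [Experiment/Sensor:0]
//   type = iCubPalmPatchesTouchSensor
//   hand = left
//   checkAllObjects = false
//   drawSensor = true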
1859 
1860 iCubPalmPatchesTouchSensor::~iCubPalmPatchesTouchSensor()
1861 {
1862  // Nothing to do here, the renderer is destroyed by its owners
1863 }
1864 
1865 void iCubPalmPatchesTouchSensor::save(ConfigurationParameters& params, QString prefix)
1866 {
1867  iCubSensor::save(params, prefix);
1868  params.startObjectParameters(prefix, "iCubPalmPatchesTouchSensor", this);
1869  params.createParameter(prefix, "hand", m_icubHand);
1870  params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
1871  params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
1872 }
1873 
1874 void iCubPalmPatchesTouchSensor::describe(QString type)
1875 {
1876  iCubSensor::describe(type);
1877 	Descriptor d = addTypeDescription(type, "Hand palm touch sensor", "The touch sensor of the iCub hand palm. There are four sensors, roughly in the same positions as the four patches on the real iCub palm");
1878 	d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose palm touch sensors are simulated. Choose between \"right\" and \"left\"");
1879 	d.describeBool("checkAllObjects").def(true).help("Whether to check collisions with all objects", "If true, the collision of the palm touch sensors with all objects in the world is checked, otherwise only objects in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked, regardless of the value of this parameter");
1880 	d.describeBool("drawSensor").def(true).help("Whether to draw the sensor areas", "If true, the areas corresponding to the touch sensor patches are drawn on the palm when doing graphical simulations");
1881 }
1882 
1883 void iCubPalmPatchesTouchSensor::update()
1884 {
1885 	// Checking all resources we need exist
1886 	checkAllNeededResourcesExist();
1887 
1888  // Acquiring the lock to get resources
1889  ResourcesLocker locker(this);
1890 
1891  EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
1892  evonetIt->setCurrentBlock(name());
1893 
1894  // First of all we have to get the list of collision points
1895  QVector<wVector> contacts;
1896  if (m_icub->isKinematic() || !m_checkAllObjects) {
1897  // Checking contacts with objects in the list. Appending all contacts to the contacts vector
1898  for (int i = 0; i < m_objects->size(); i++) {
1899  PhyObject* obj = dynamic_cast<PhyObject*>(m_objects->at(i));
1900  QVector<wVector> contactsWithObj;
1901  if (obj != NULL) {
1902 			m_world->smartCheckContacts(m_handPalm, obj, maxNumContacts, &contactsWithObj);
1903  contacts << contactsWithObj;
1904  }
1905  }
1906  } else {
1907  // Taking the vector of contacts. If no contact is present, this returns an empty vector
1908  const contactVec& c = m_world->contacts()[m_handPalm];
1909 
1910  for (int i = 0; i < c.size(); i++) {
1911  contacts.append(c[i].pos);
1912  }
1913  }
1914 
1915  // Now we have to check each contact point for each triangle. We also save activations into a QVector
1916  // if the sensor is drawn
1917  QVector<bool> activations;
1918  if (m_drawSensor) {
1919  activations.fill(false, m_patches.size());
1920  }
1921  for (int i = 0; i < m_patches.size(); i++) {
1922  float activation = 0.0;
1923  for (int j = 0; j < contacts.size(); j++) {
1924  if (pointInPalmTriangle(contacts[j], m_patches[i])) {
1925  activation = 1.0;
1926  if (m_drawSensor) {
1927  activations[i] = true;
1928  }
1929  break;
1930  }
1931  }
1932  evonetIt->setInput(activation);
1933  evonetIt->nextNeuron();
1934  }
1935  if (m_drawSensor) {
1936  m_graphicalTouchSensor->setActivations(activations);
1937  }
1938 }
1939 
1940 int iCubPalmPatchesTouchSensor::size()
1941 {
1942  // The number of patches is always 4
1943  return 4;
1944 }
1945 
1946 void iCubPalmPatchesTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
1947 {
1948  iCubSensor::resourceChanged(resourceName, changeType);
1949 
1950  if (changeType == Deleted) {
1951  return;
1952  }
1953 
1954  if (resourceName == icubResource) {
1955  m_icub = getResource<iCubRobot>();
1956  if (m_isRight) {
1957  m_icubArm = m_icub->rightArm();
1958  } else {
1959  m_icubArm = m_icub->leftArm();
1960  }
1961  m_handPalm = dynamic_cast<PhyBox*>(m_icubArm[6]);
1962 
1963 		// Creating the list of triangles. The palmDirection constant is needed because the right and
1964 		// left hand have different frames of reference: in the right hand the palm is towards +z, while
1965 		// in the left hand it is towards -z. The center of the patches is not in the middle of the palm
1966 		// along the y axis: it is slightly moved towards the side opposite to the thumb
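		// With an illustrative palm size of sideX = 0.05 m, this gives triangleSide = 0.025,
		// triangleHalfSide = 0.0125 and triangleHeight = sqrt(0.025^2 - 0.0125^2) ~= 0.0217 m
		// (the height of an equilateral triangle, triangleSide * sqrt(3) / 2)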
1967  const float palmDirection = m_isRight ? +1.0 : -1.0;
1968  const float triangleSide = m_handPalm->sideX() / 2.0f;
1969  const float triangleHalfSide = triangleSide / 2.0f;
1970  const float triangleHeight = sqrt((triangleSide * triangleSide) - (triangleHalfSide * triangleHalfSide));
1971  const float palmZ = palmDirection * m_handPalm->sideZ() / 2.0;
1972  const float palmCenterY = (m_handPalm->sideY() / 2.0) - triangleHeight * 1.1;
1973  Triangle t;
1974  m_patches.clear();
1975 
1976  t.a = wVector(0.0, palmCenterY, palmZ);
1977  t.b = wVector(triangleSide, palmCenterY, palmZ);
1978  t.c = wVector(triangleHalfSide, palmCenterY - triangleHeight, palmZ);
1979  m_patches.append(t);
1980 
1981  t.a = wVector(0.0, palmCenterY, palmZ);
1982  t.b = wVector(triangleSide, palmCenterY, palmZ);
1983  t.c = wVector(triangleHalfSide, palmCenterY + triangleHeight, palmZ);
1984  m_patches.append(t);
1985 
1986  t.a = wVector(0.0, palmCenterY, palmZ);
1987  t.b = wVector(triangleHalfSide, palmCenterY + triangleHeight, palmZ);
1988  t.c = wVector(-triangleHalfSide, palmCenterY + triangleHeight, palmZ);
1989  m_patches.append(t);
1990 
1991  t.a = wVector(0.0, palmCenterY, palmZ);
1992  t.b = wVector(-triangleSide, palmCenterY, palmZ);
1993  t.c = wVector(-triangleHalfSide, palmCenterY + triangleHeight, palmZ);
1994  m_patches.append(t);
1995 
1996  if (m_drawSensor) {
1997  m_graphicalTouchSensor = new PalmPatchesTouchSensorGraphic(m_handPalm, m_patches, m_isRight);
1998  }
1999  } else if (resourceName == neuronsIteratorResource) {
2000  QString lbl;
2001  if (m_isRight) {
2002  lbl = "R";
2003  } else {
2004  lbl = "L";
2005  }
2006 
2007  EvonetIterator* evonetIt = getResource<EvonetIterator>();
2008  evonetIt->setCurrentBlock(name());
2009  evonetIt->setGraphicProperties(lbl + "p1", 0.0, 1.0, Qt::red);
2010  evonetIt->nextNeuron();
2011  evonetIt->setGraphicProperties(lbl + "p2", 0.0, 1.0, Qt::red);
2012  evonetIt->nextNeuron();
2013  evonetIt->setGraphicProperties(lbl + "p3", 0.0, 1.0, Qt::red);
2014  evonetIt->nextNeuron();
2015  evonetIt->setGraphicProperties(lbl + "p4", 0.0, 1.0, Qt::red);
2016  evonetIt->nextNeuron();
2017  } else if (resourceName == "objects") {
2018  m_objects = getResource<QVector<WObject*> >();
2019  } else if (resourceName == "world") {
2020  m_world = getResource<World>();
2021  } else {
2022  Logger::info("Unknown resource " + resourceName + " for " + name());
2023  }
2024 }
2025 
2026 bool iCubPalmPatchesTouchSensor::pointInPalmTriangle(const wVector& point, const Triangle& triangle) const
2027 {
2028  // Checking that the point is on the palm side of the hand
2029  if (((m_isRight) && (point.z < 0.0)) || ((!m_isRight) && (point.z > 0.0))) {
2030  return false;
2031  }
2032 
2033 	// The algorithm used here relies on barycentric coordinates to check whether a point is inside a triangle.
2034 	// You can find more information at the following links:
2035  // http://en.wikipedia.org/wiki/Barycentric_coordinates_(mathematics)
2036  // http://www.blackpawn.com/texts/pointinpoly/default.html
2037  // The version implemented here is directly taken from the second link (an offline version is in the
2038  // documentation). We discard the z coordinate (and do computations in 2D) because the check on z has
2039  // already been done before
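	// A quick numeric check with assumed values: for the 2D triangle a = (0, 0), b = (1, 0),
	// c = (0.5, 1) and the point p = (0.5, 0.25):
	//   v0 = (0.5, 1), v1 = (1, 0), v2 = (0.5, 0.25)
	//   dot00 = 1.25, dot01 = 0.5, dot02 = 0.5, dot11 = 1.0, dot12 = 0.5
	//   invDenom = 1.0, u = 0.25, v = 0.375
	// so u >= 0, v >= 0 and u + v = 0.625 < 1: p is inside the triangle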
2040 
2041  // Compute vectors
2042  const float v0x = triangle.c.x - triangle.a.x;
2043  const float v0y = triangle.c.y - triangle.a.y;
2044  const float v1x = triangle.b.x - triangle.a.x;
2045  const float v1y = triangle.b.y - triangle.a.y;
2046  const float v2x = point.x - triangle.a.x;
2047  const float v2y = point.y - triangle.a.y;
2048 
2049  // Compute dot products
2050  const float dot00 = v0x * v0x + v0y * v0y;
2051  const float dot01 = v0x * v1x + v0y * v1y;
2052  const float dot02 = v0x * v2x + v0y * v2y;
2053  const float dot11 = v1x * v1x + v1y * v1y;
2054  const float dot12 = v1x * v2x + v1y * v2y;
2055 
2056  // Compute barycentric coordinates
2057  const float invDenom = 1.0 / (dot00 * dot11 - dot01 * dot01);
2058  const float u = (dot11 * dot02 - dot01 * dot12) * invDenom;
2059  const float v = (dot00 * dot12 - dot01 * dot02) * invDenom;
2060 
2061  // Check if point is in triangle
2062  return (u >= 0) && (v >= 0) && (u + v < 1);
2063 }
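
// Note that the boundary handling of pointInPalmTriangle() is asymmetric: points on the edges
// incident to vertex a (u == 0 or v == 0) are classified as inside, while points on the edge
// between b and c (u + v == 1) are classified as outside. Since the patches only produce binary
// activations, this asymmetry is harmless in practice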
2064 
2065 HandObjectVisualOffsetSensor::HandObjectVisualOffsetSensor(ConfigurationParameters& params, QString prefix) :
2066  iCubSensor(params, prefix),
2067  m_icubHand("right"),
2068  m_world(NULL),
2069  m_eye(NULL),
2070  m_hand(NULL),
2071  m_object(NULL)
2072 {
2073  m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", "right");
2074 
2075  // Declaring the resources that are needed here
2076  usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
2077 }
2078 
2079 HandObjectVisualOffsetSensor::~HandObjectVisualOffsetSensor()
2080 {
2081 }
2082 
2083 void HandObjectVisualOffsetSensor::save(ConfigurationParameters& params, QString prefix)
2084 {
2085  iCubSensor::save(params, prefix);
2086 
2087  params.startObjectParameters(prefix, "HandObjectVisualOffsetSensor", this);
2088  params.createParameter(prefix, "hand", m_icubHand);
2089 }
2090 
2091 void HandObjectVisualOffsetSensor::describe(QString type)
2092 {
2093  iCubSensor::describe(type);
2094  Descriptor d = addTypeDescription(type, "Visual offset between the hand and the object", "This sensor computes the distance between the hand and the first object in the visual field of the robot. Returns the distances on the vertical and horizontal axes");
2095  d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
2096 }
2097 
2098 void HandObjectVisualOffsetSensor::update()
2099 {
2100 	// Checking all resources we need exist
2101 	checkAllNeededResourcesExist();
2102 
2103  // Acquiring the lock to get resources
2104  ResourcesLocker locker(this);
2105 
2106 	// We get this here because we are not notified when the vector content changes (i.e. elements
2107 	// are added or removed), only when the whole vector is replaced (and that doesn't happen)
2108  m_object = (*(getResource<QVector<WObject*> >("objects")))[0];
2109 
2110  // Setting the eye matrix in the projector
2111  Projector projector;
2112  projector.setEyeMatrix(m_eye->matrix());
2113 
2114  // Computing the projection of the object on the retina
2115  projector.set3DPointWorld(m_object->matrix().w_pos);
2116  const ImagePoint objPos = projector.getImagePoint01();
2117 
2118  // Computing the projection of the hand on the retina
2119  projector.set3DPointWorld(m_hand->matrix().w_pos);
2120  const ImagePoint handPos = projector.getImagePoint01();
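	// As its name suggests, getImagePoint01() returns coordinates normalized in [0, 1], so the
	// offsets computed below do not depend on the retina resolution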
2121 
2122 	double dx, dy;
2123 	if (objPos.isValid() && handPos.isValid()) {
2124 		dx = objPos.x - handPos.x;
2125 		dx = tanh(5.0 * dx);
2126 
2127 		dy = objPos.y - handPos.y;
2128 		dy = tanh(5.0 * dy);
2129 	} else {
2130 		dx = 0.0;
2131 		dy = 0.0;
2132 	}
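
	// The tanh squashes the raw offsets into (-1, 1) with a gain of 5: for instance, an assumed
	// offset of 0.1 in normalized image coordinates maps to tanh(0.5) ~= 0.46 and an offset of
	// 0.2 maps to tanh(1.0) ~= 0.76, so even small misalignments produce strong activations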
2133 
2134  EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
2135  evonetIt->setCurrentBlock(name());
2136  evonetIt->setInput(dx);
2137  evonetIt->nextNeuron();
2138  evonetIt->setInput(dy);
2139 }
2140 
2141 int HandObjectVisualOffsetSensor::size()
2142 {
2143  return 2;
2144 }
2145 
2146 void HandObjectVisualOffsetSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
2147 {
2148  iCubSensor::resourceChanged(resourceName, changeType);
2149 
2150  if (changeType == Deleted) {
2151  return;
2152  }
2153 
2154  if (resourceName == icubResource) {
2155  iCubRobot* icub = getResource<iCubRobot>();
2156  m_eye = icub->headNeck()[4];
2157 		if (m_icubHand.toLower() == "left") {
2158  m_hand = icub->leftArm()[6];
2159  } else {
2160  m_hand = icub->rightArm()[6];
2161  }
2162  } else if (resourceName == neuronsIteratorResource) {
2163  EvonetIterator* evonetIt = getResource<EvonetIterator>();
2164  evonetIt->setCurrentBlock(name());
2165  evonetIt->setGraphicProperties("dx", -1.0, 1.0, Qt::red);
2166  evonetIt->nextNeuron();
2167  evonetIt->setGraphicProperties("dy", -1.0, 1.0, Qt::red);
2168  } else if (resourceName == "objects") {
2169  // Nothing to do here, we get objects with getResource() in update()
2170  } else if (resourceName == "world") {
2171  m_world = getResource<World>();
2172  } else {
2173  Logger::info("Unknown resource " + resourceName + " for " + name());
2174  }
2175 }
2176 
2177 } // end namespace farsa
2178 
2179 #endif