experiments/src/icubsensors.cpp

00001 /********************************************************************************
00002  *  FARSA Experiments Library                                                   *
00003  *  Copyright (C) 2007-2012                                                     *
00004  *  Gianluca Massera <emmegian@yahoo.it>                                        *
00005  *  Stefano Nolfi <stefano.nolfi@istc.cnr.it>                                   *
00006  *  Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it>                         *
00007  *  Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it>                           *
00008  *                                                                              *
00009  *  This program is free software; you can redistribute it and/or modify        *
00010  *  it under the terms of the GNU General Public License as published by        *
00011  *  the Free Software Foundation; either version 2 of the License, or           *
00012  *  (at your option) any later version.                                         *
00013  *                                                                              *
00014  *  This program is distributed in the hope that it will be useful,             *
00015  *  but WITHOUT ANY WARRANTY; without even the implied warranty of              *
00016  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               *
00017  *  GNU General Public License for more details.                                *
00018  *                                                                              *
00019  *  You should have received a copy of the GNU General Public License           *
00020  *  along with this program; if not, write to the Free Software                 *
00021  *  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA  *
00022  ********************************************************************************/
00023 
00024 #ifdef FARSA_USE_YARP_AND_ICUB
00025 
00026 #include "icubsensors.h"
00027 #include "configurationhelper.h"
00028 #include "motorcontrollers.h"
00029 #include "logger.h"
00030 #include "graphicalwobject.h"
00031 #include <QStringList>
00032 
00033 namespace farsa {
00034 
00035 iCubSensor::iCubSensor(ConfigurationParameters& params, QString prefix) :
00036     Sensor(params, prefix),
00037     icubResource("robot"),
00038     neuronsIteratorResource("neuronsIterator")
00039 {
00040     // Reading parameters
00041     icubResource = ConfigurationHelper::getString(params, prefix + "icub", icubResource);
00042     neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", neuronsIteratorResource);
00043 
00044     // Declaring the resources that are needed here
00045     usableResources(QStringList() << icubResource << neuronsIteratorResource);
00046 }
00047 
00048 iCubSensor::~iCubSensor()
00049 {
00050     // Nothing to do here
00051 }
00052 
00053 void iCubSensor::save(ConfigurationParameters& params, QString prefix)
00054 {
00055     // Calling parent function
00056     Sensor::save(params, prefix);
00057 
00058     // Saving parameters
00059     params.startObjectParameters(prefix, "iCubSensor", this);
00060     params.createParameter(prefix, "icub", icubResource);
00061     params.createParameter(prefix, "neuronsIterator", neuronsIteratorResource);
00062 }
00063 
00064 void iCubSensor::describe(QString type)
00065 {
00066     // Calling parent function
00067     Sensor::describe(type);
00068 
00069     // Describing our parameters
00070     Descriptor d = addTypeDescription(type, "The base class for iCub sensors");
00071     d.describeString("icub").def("robot").help("the name of the resource associated with the iCub robot to use (default is \"robot\")");
00072     d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
00073 }
00074 
00075 void iCubSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
00076 {
00077     // Calling parent function
00078     Sensor::resourceChanged(resourceName, changeType);
00079 
00080     // Here we only check whether the resource has been deleted and reset the check flag, the
00081     // actual work is done in subclasses
00082     if (changeType == Deleted) {
00083         resetNeededResourcesCheck();
00084         return;
00085     }
00086 }
00087 
00088 iCubArmJointsSensor::iCubArmJointsSensor( ConfigurationParameters& params, QString prefix ) :
00089     iCubSensor(params, prefix),
00090     icubMotors(NULL) {
00091     icubArm = ConfigurationHelper::getString( params, prefix+"arm", "right" );
00092     // Declaring the resources that are needed here
00093     usableResources( QStringList() << icubResource << neuronsIteratorResource );
00094 }
00095 
00096 iCubArmJointsSensor::~iCubArmJointsSensor() {
00097     /* nothing to do */
00098 }
00099 
00100 void iCubArmJointsSensor::save( ConfigurationParameters& params, QString prefix ) {
00101     iCubSensor::save( params, prefix );
00102     params.startObjectParameters( prefix, "iCubArmJointsSensor", this );
00103     params.createParameter( prefix, "arm", icubArm );
00104 }
00105 
00106 void iCubArmJointsSensor::describe( QString type ) {
00107     iCubSensor::describe( type );
00108     Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub arm" );
00109     d.describeEnum( "arm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The arm from which the joint angles are read" );
00110 }
00111 
00112 void iCubArmJointsSensor::update() {
00113     // Checking all resources we need exist
00114     checkAllNeededResourcesExist();
00115 
00116     // Acquiring the lock to get resources
00117     ResourcesLocker locker( this );
00118 
00119     QStringList values;
00120     for( int i=0; i<7; i++ ) {
00121         double value;
00122         icubMotors->getEncoder( i, &value );
00123         values << QString::number( value );
00124     }
00125     //exp->setStatus( QString("SENSOR Reading: <")+values.join(", ")+QString(">") );
00126     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00127     evonetIt->setCurrentBlock( name() );
00128     for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
00129         double min, max, value;
00130         icubMotors->getEncoder(i, &value);
00131         icubMotors->getLimits(i,&min,&max);
00132         //normalizziamo i valori dei motori tra 0 ed 1;
00133         evonetIt->setInput( linearMap(value,min,max,0,1) );
00134     }
00135 }
00136 
00137 int iCubArmJointsSensor::size() {
00138     return 7;
00139 }
00140 
00141 void iCubArmJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
00142     iCubSensor::resourceChanged(resourceName, changeType);
00143 
00144     if (changeType == Deleted) {
00145         return;
00146     }
00147 
00148     if (resourceName == icubResource) {
00149         iCubRobot* icub = getResource<iCubRobot>();
00150         if ( icubArm == "right" ) {
00151             icubMotors = icub->rightArmController();
00152         } else {
00153             icubMotors = icub->leftArmController();
00154         }
00155     } else if (resourceName == neuronsIteratorResource) {
00156         QString lbl;
00157         if ( icubArm == "right" ) {
00158             lbl = "R";
00159         } else {
00160             lbl = "L";
00161         }
00162 
00163         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00164         evonetIt->setCurrentBlock( name() );
00165         for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
00166             evonetIt->setGraphicProperties( lbl + QString("a") + QString::number(i), 0.0, 1.0, Qt::red );
00167         }
00168     } else {
00169         Logger::info("Unknown resource " + resourceName + " for " + name());
00170     }
00171 }
00172 
00173 ColorCameraSensor::ColorCameraSensor(ConfigurationParameters& params, QString prefix) :
00174     iCubSensor(params, prefix),
00175     nObjects(3)
00176 {
00177     nObjects = ConfigurationHelper::getInt(params, prefix + "nObjects", nObjects);
00178     // Declaring the resources that are needed here
00179     usableResources( QStringList() << icubResource << neuronsIteratorResource << "objects" );
00180 }
00181 
00182 void ColorCameraSensor::save( ConfigurationParameters& params, QString prefix ) {
00183     iCubSensor::save( params, prefix );
00184     params.startObjectParameters( prefix, "ColorCameraSensor", this );
00185     params.createParameter( prefix, "nObjects", QString::number(nObjects) );
00186 }
00187 
00188 void ColorCameraSensor::describe( QString type ) {
00189     iCubSensor::describe( type );
00190     Descriptor d = addTypeDescription( type, "Color Camera Sensor" );
00191     d.describeInt( "nObjects" ).def( 3 ).help( "Number of Objects" );
00192 }
00193 
// update the camera on the basis of 3 objects that should be defined as red, green, and blue
// For each tracked object two neurons are written: first the vertical offset,
// then the horizontal offset of the object's projection on the retina.
void ColorCameraSensor::update()
{
    // Checking all resources we need exist
    checkAllNeededResourcesExist();

    // Acquiring the lock to get resources
    ResourcesLocker locker( this );

    QVector<WObject*>& objects = *(getResource<QVector<WObject*> >( "objects" ));
    iCubRobot* icub = getResource<iCubRobot>(icubResource);
    // Setting the eye matrix in the projector (headNeck()[4] is used as the
    // eye frame here — presumably the eye link; confirm against the iCub model)
    m_projector.setEyeMatrix(icub->headNeck()[4]->matrix());

    NeuronsIterator* evonetIt = getResource<NeuronsIterator>(neuronsIteratorResource);
    evonetIt->setCurrentBlock( name() );
    // Activating the three parts of the map: Red...
    for(int i = 0; i < nObjects; i++) {
        // Checking we don't try to access unexisting objects: if the objects
        // vector is shorter than nObjects, pad the remaining neurons
        if (i >= objects.size()) {
            // Filling with 0.5 (the value produced when an object projects
            // exactly to the image center)
            // up-down
            evonetIt->setInput( 0.5 );
            evonetIt->nextNeuron();
            // right-left
            evonetIt->setInput( 0.5 );
            evonetIt->nextNeuron();
            continue;
        }

        // Computing the projection of the object on the retina
        m_projector.set3DPointWorld(objects[i]->matrix().w_pos);

        // If the object is within the retina, activating the map
        if (m_projector.pointInsideImage())
        {
            // mapPoint01.x.y = distance between the barycentre of the object and the border of the
            // field of view normalized in the range [0.0,1.0]
            const ImagePoint mapPoint01 = m_projector.getImagePoint01();

            // up-down
            // mapPoint01.y = normalized [0,1] distance between the object
            // barycentre and the field-of-view border; 0.5 - y centers it
            evonetIt->setInput( 0.5 - mapPoint01.y );
            evonetIt->nextNeuron();
            // right-left
            evonetIt->setInput( 0.5 - mapPoint01.x );
            evonetIt->nextNeuron();

            // Storing the position on the retina of the current object
            m_objectsRetinaPosition.insert(objects[i],mapPoint01);
        } else {
            // Object outside the field of view: both neurons get 0.0
            evonetIt->setInput( 0.0 );
            evonetIt->nextNeuron();
            evonetIt->setInput( 0.0 );
            evonetIt->nextNeuron();
            // Storing an invalid point for the current object
            m_objectsRetinaPosition.insert(objects[i], ImagePoint());
        }
    }
}
00254 
00255 int ColorCameraSensor::size()
00256 {
00257     return nObjects*2;
00258 }
00259 
00260 void ColorCameraSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
00261 {
00262     iCubSensor::resourceChanged(resourceName, changeType);
00263 
00264     if (changeType == Deleted) {
00265         return;
00266     }
00267 
00268     if (resourceName == icubResource) {
00269         // Nothing to do here, we get the robot using getResource() in update()
00270     } else if (resourceName == neuronsIteratorResource) {
00271         QString lbl[2];
00272         lbl[0] = "H"; //horizontal
00273         lbl[1] = "V"; //vertical
00274         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00275         evonetIt->setCurrentBlock( name() );
00276         for (int obj = 0; obj < nObjects; obj++) {
00277             for (int i=0; i < 2; i++, evonetIt->nextNeuron()) {
00278                 QString label;
00279                 switch (obj) {
00280                     case 0:
00281                         label = QString("Cr")+ lbl[i];//QString::number(i);
00282                         break;
00283                     case 1:
00284                         label = QString("Cg")+lbl[i];//QString::number(i);
00285                         break;
00286                     case 2:
00287                         label = QString("Cb")+lbl[i];//QString::number(i);
00288                         break;
00289                     default:
00290                         label = QString::number(obj)+QString::number(i);
00291                         break;
00292                 }
00293                 evonetIt->setGraphicProperties( label, -1.0, 1.0, Qt::red );
00294             }
00295         }
00296     } else if (resourceName == "objects") {
00297         // Nothing to do here, we get objects using getResource() in update()
00298     } else {
00299         Logger::info("Unknown resource " + resourceName + " for " + name());
00300     }
00301 }
00302 
00303 //iCubPalmTargetDistSensor : begin implementation
00304 // it returns the distance between right or left palm and a defined target
00305 iCubPalmTargetDistSensor::iCubPalmTargetDistSensor(ConfigurationParameters &params, QString prefix) :
00306     iCubSensor(params, prefix) {
00307     icubPalm = ConfigurationHelper::getString( params, prefix+"palm", "right" );
00308     targetName= ConfigurationHelper::getString( params, prefix+"target", "target" );
00309     QVector<double> vec1 = ConfigurationHelper::getVector( params, prefix+"bbMin" );
00310     QVector<double> vec2 = ConfigurationHelper::getVector( params, prefix+"bbMax" );
00311     if ( vec1.size() == 3 && vec2.size() == 3 ) {
00312         linearize = true;
00313         bbMin = wVector( vec1[0], vec1[1], vec1[2] );
00314         bbMax = wVector( vec2[0], vec2[1], vec2[2] );
00315     } else {
00316         linearize = false;
00317         if ( ! (vec1.isEmpty() && vec2.isEmpty()) ) {
00318             Logger::warning( QString("iCubPalmTargetDistSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
00319         }
00320     }
00321     
00322     QVector<double> pal1 = ConfigurationHelper::getVector( params, prefix+"palmOffset" );
00323     if ( pal1.size() == 3 ) {
00324         addPalmOffset = true;
00325         palmOffset = wVector( pal1[0], pal1[1], pal1[2] );
00326     } else {
00327         addPalmOffset = false;
00328         if ( !pal1.isEmpty() ) {
00329             Logger::warning( QString("iCubPalmTargetDistSensor %1 - palmOffset parameter is not well specified; It will be ignored").arg(name()) );
00330         }
00331     }
00332 
00333     // Declaring the resources that are needed here
00334     usableResources( QStringList() << icubResource << targetName << neuronsIteratorResource );
00335 }
00336 
00337 iCubPalmTargetDistSensor::~iCubPalmTargetDistSensor() {
00338     /* nothing to do */
00339 }
00340 
00341 void iCubPalmTargetDistSensor::save(ConfigurationParameters &params, QString prefix)
00342 {
00343     iCubSensor::save( params, prefix );
00344     params.startObjectParameters( prefix, "iCubPalmTargetDistSensor", this );
00345     params.createParameter( prefix, "palm", icubPalm);
00346     params.createParameter( prefix, "target", targetName);
00347     if ( linearize ) {
00348         params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
00349         params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
00350     }
00351 }
00352 
00353 void iCubPalmTargetDistSensor::describe( QString type ) {
00354     iCubSensor::describe( type );
00355     Descriptor d = addTypeDescription( type, "Sensor for reading the distance between right or left palm and a specified target" );
00356     d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the distance to the target is computed" );
00357     d.describeString( "target" ).def( "target" ).help( "The name of the resource associated with the target object" );
00358     d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used for linearize the object position into [-1,1]" );
00359     d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used for linearize the object position into [-1,1]" );
00360     d.describeReal( "palmOffset" ).props( IsList ).help( "The offset respect to the palm on which the distance will be computed" );
00361 }
00362 
00363 void iCubPalmTargetDistSensor::update() {
00364     // Checking all resources we need exist
00365     checkAllNeededResourcesExist();
00366 
00367     // Acquiring the lock to get resources
00368     ResourcesLocker locker( this );
00369 
00370     iCubRobot* icub = getResource<iCubRobot>( icubResource );
00371     WObject* target = getResource<WObject>( targetName );
00372     wVector targetPosInICub = icub->matrix().untransformVector( target->matrix().w_pos );
00373     wVector palmPosInICub;
00374     if ( isLeft ) {
00375         wMatrix t2 = icub->leftArm()[6]->matrix();
00376         if ( addPalmOffset ) {
00377             t2.w_pos += t2.rotateVector( palmOffset );
00378         }
00379         palmPosInICub = icub->matrix().untransformVector( t2.w_pos );
00380     } else {
00381         wMatrix t2 = icub->rightArm()[6]->matrix();
00382         if ( addPalmOffset ) {
00383             t2.w_pos += t2.rotateVector( palmOffset );
00384         }
00385         palmPosInICub = icub->matrix().untransformVector( t2.w_pos );
00386     }
00387 
00388     wVector distanceVec = palmPosInICub - targetPosInICub;
00389     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00390     evonetIt->setCurrentBlock( name() );
00391     for( int i=0; i<3; i++ ) {
00392         if ( linearize ) {
00393             // linearize into [-1,1]
00394             evonetIt->setInput( linearMap( distanceVec[i], bbMin[i], bbMax[i], -1, 1 ) );
00395         } else {
00396             evonetIt->setInput( distanceVec[i] );
00397         }
00398         evonetIt->nextNeuron();
00399     }
00400 }
00401 
00402 int iCubPalmTargetDistSensor::size() {
00403     return 3;
00404 }
00405 
00406 void iCubPalmTargetDistSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
00407     iCubSensor::resourceChanged(resourceName, changeType);
00408 
00409     if (changeType == Deleted) {
00410         return;
00411     }
00412 
00413     if (resourceName == icubResource) {
00414         // Nothing to do here, we get the robot using getResource() in update()
00415     } else if (resourceName == neuronsIteratorResource) {
00416         QString lbl;
00417         if ( icubPalm == "right" ) {
00418             lbl="R";
00419             isLeft = false;
00420         } else {
00421             lbl="L";
00422             isLeft = true;
00423         }
00424 
00425         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00426         evonetIt->setCurrentBlock( name() );
00427         for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
00428             evonetIt->setGraphicProperties( lbl+QString("d")+QString::number(i), -1.0, 1.0, Qt::red );
00429         }
00430     } else if (resourceName == targetName) {
00431         // Nothing to do here, we get the taget using getResource() in update()
00432     } else {
00433         Logger::info("Unknown resource " + resourceName + " for " + name());
00434     }
00435 }
00436 
00437 //iCubPalmTargetDistSensor : end implementation
00438 
// iCubPalmTouchSensor begin implementation
00440 iCubPalmTouchSensor::iCubPalmTouchSensor(ConfigurationParameters &params, QString prefix) :
00441     iCubSensor(params, prefix) {
00442     wPalm=NULL;
00443     objects=NULL;
00444     icubPalm = ConfigurationHelper::getString( params, prefix+"palm", "right" );
00445     // Declaring the resources that are needed here
00446     usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
00447 }
00448 
00449 void iCubPalmTouchSensor::describe( QString type ) {
00450     iCubSensor::describe( type );
00451     Descriptor d = addTypeDescription( type, "Sensor for reading right or left palm touch sensor" );
00452     d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the touch sensor is read" );
00453     
00454 }
00455 void iCubPalmTouchSensor::update() {
00456     // Checking all resources we need exist
00457     checkAllNeededResourcesExist();
00458 
00459     // Acquiring the lock to get resources
00460     ResourcesLocker locker( this );
00461 
00462     double touch=0.0;
00463     //we should put following instruction in the resourceChanged method
00464     World *world = getResource<World>("world");
00465     if(objects!=NULL) {
00466         for(int i=0;i<objects->size();i++)
00467         {
00468             if(world->checkContacts((PhyObject*)wPalm,(PhyObject*)objects->at(i)))
00469                 touch=1.0;
00470         }
00471     }
00472     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00473     evonetIt->setCurrentBlock( name() );
00474     
00475     for( int i=0; i<size(); i++, evonetIt->nextNeuron() ) {
00476         evonetIt->setInput(touch);
00477     }
00478 }
00479 
00480 int iCubPalmTouchSensor::size() {
00481     return 1;
00482 }
00483 
00484 iCubPalmTouchSensor::~iCubPalmTouchSensor()
00485 {}
00486 
00487 void iCubPalmTouchSensor::save(ConfigurationParameters &params, QString prefix)
00488 {
00489     iCubSensor::save( params, prefix );
00490     params.startObjectParameters( prefix, "iCubPalmTouchSensor", this );
00491     params.createParameter( prefix, "palm", icubPalm);
00492     
00493 }
00494 
00495 void iCubPalmTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
00496     iCubSensor::resourceChanged(resourceName, changeType);
00497 
00498     if (changeType == Deleted) {
00499         return;
00500     }
00501 
00502     if (resourceName == icubResource) {
00503         iCubRobot *icub = getResource<iCubRobot>();
00504 
00505         if ( icubPalm == "right" ) {
00506             wPalm = icub->rightArm()[6];
00507         } else {
00508             wPalm = icub->leftArm()[6];
00509         }
00510     } else if (resourceName == "world") {
00511         // Nothing to do here
00512     } else if (resourceName == "objects") {
00513         objects = getResource<QVector<WObject*> >();
00514     } else if (resourceName == neuronsIteratorResource) {
00515         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00516         evonetIt->setCurrentBlock( name() );
00517         for( int i=0; i<1; i++, evonetIt->nextNeuron() ) {
00518             if(icubPalm=="right") {
00519                 evonetIt->setGraphicProperties( QString("Rpt"), 0, 1, Qt::red );
00520             } else {
00521                 evonetIt->setGraphicProperties( QString("Lpt"), 0, 1, Qt::red );
00522             }
00523         }
00524     } else {
00525         Logger::info("Unknown resource " + resourceName + " for " + name());
00526     }
00527 }
00528 
// iCubPalmTouchSensor end implementation
00530 
00531 iCubHandTouchSensor::iCubHandTouchSensor(ConfigurationParameters& params, QString prefix) :
00532     iCubSensor(params, prefix),
00533     m_icubHand("right"),
00534     m_checkAllObjects(true),
00535     m_world(NULL),
00536     m_icubArm(),
00537     m_objects(),
00538     m_icub(NULL)
00539 {
00540     m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", "right");
00541     m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", true);
00542     // Declaring the resources that are needed here
00543     usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
00544 
00545 }
00546 
00547 iCubHandTouchSensor::~iCubHandTouchSensor()
00548 {
00549 }
00550 
00551 void iCubHandTouchSensor::save(ConfigurationParameters& params, QString prefix)
00552 {
00553     iCubSensor::save( params, prefix );
00554     params.startObjectParameters(prefix, "iCubHandTouchSensor", this);
00555     params.createParameter(prefix, "hand", m_icubHand);
00556     params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
00557 }
00558 
00559 void iCubHandTouchSensor::describe(QString type)
00560 {
00561     iCubSensor::describe( type );
00562     Descriptor d = addTypeDescription(type, "Hand touch sensor", "The touch sensor of the iCub hand. There are six sensors: one on the palm and one for each figertip");
00563     d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
00564     d.describeBool("checkAllObjects").def(true).help("Wheter to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
00565 }
00566 
00567 void iCubHandTouchSensor::update()
00568 {
00569     // Checking all resources we need exist
00570     checkAllNeededResourcesExist();
00571 
00572     // Acquiring the lock to get resources
00573     ResourcesLocker locker( this );
00574 
00575     m_objects = *(getResource<QVector<WObject*> >( "objects" ));
00576 
00577     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00578     evonetIt->setCurrentBlock(name());
00579 
00580     evonetIt->setInput(handPieceCollides(m_icubArm[6])); // Palm
00581     evonetIt->nextNeuron();
00582     evonetIt->setInput(handPieceCollides(m_icubArm[19])); // Index
00583     evonetIt->nextNeuron();
00584     evonetIt->setInput(handPieceCollides(m_icubArm[20])); // Middle
00585     evonetIt->nextNeuron();
00586     evonetIt->setInput(handPieceCollides(m_icubArm[21])); // Ring
00587     evonetIt->nextNeuron();
00588     evonetIt->setInput(handPieceCollides(m_icubArm[22])); // Little
00589     evonetIt->nextNeuron();
00590     evonetIt->setInput(handPieceCollides(m_icubArm[26])); // Thumb
00591     evonetIt->nextNeuron();
00592 }
00593 
00594 int iCubHandTouchSensor::size()
00595 {
00596     return 6;
00597 }
00598 
00599 void iCubHandTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
00600 {
00601     iCubSensor::resourceChanged(resourceName, changeType);
00602 
00603     if (changeType == Deleted) {
00604         return;
00605     }
00606 
00607     if (resourceName == icubResource) {
00608         m_icub = getResource<iCubRobot>();
00609         if (m_icubHand == "left") {
00610             m_icubArm = m_icub->leftArm();
00611         } else {
00612             m_icubArm = m_icub->rightArm();
00613         }
00614     } else if (resourceName == "world") {
00615         m_world = getResource<World>();
00616     } else if (resourceName == "objects") {
00617         // Nothing to do here, we get objects using getResource() in update()
00618     } else if (resourceName == neuronsIteratorResource) {
00619         QString lbl;
00620         if (m_icubHand == "left") {
00621             lbl = "L";
00622         } else {
00623             lbl = "R";
00624         }
00625 
00626         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00627         evonetIt->setCurrentBlock(name());
00628         evonetIt->setGraphicProperties(lbl + "pt", 0.0, 1.0, Qt::red);
00629         evonetIt->nextNeuron();
00630         evonetIt->setGraphicProperties(lbl + "f1", 0.0, 1.0, Qt::red);
00631         evonetIt->nextNeuron();
00632         evonetIt->setGraphicProperties(lbl + "f2", 0.0, 1.0, Qt::red);
00633         evonetIt->nextNeuron();
00634         evonetIt->setGraphicProperties(lbl + "f3", 0.0, 1.0, Qt::red);
00635         evonetIt->nextNeuron();
00636         evonetIt->setGraphicProperties(lbl + "f4", 0.0, 1.0, Qt::red);
00637         evonetIt->nextNeuron();
00638         evonetIt->setGraphicProperties(lbl + "f5", 0.0, 1.0, Qt::red);
00639     } else {
00640         Logger::info("Unknown resource " + resourceName + " for " + name());
00641     }
00642 }
00643 
00644 double iCubHandTouchSensor::handPieceCollides(PhyObject* handPiece)
00645 {
00646     if (m_icub->isKinematic()) {
00647         for (int i = 0; i < m_objects.size(); i++) {
00648             PhyObject* obj = dynamic_cast<PhyObject*>(m_objects[i]);
00649             if ((obj != NULL) && (m_world->checkContacts(handPiece, (PhyObject*) m_objects[i]))) {
00650                 return 1.0;
00651             }
00652         }
00653 
00654         return 0.0;
00655     } else {
00656         // Taking the vector of contacts. If no contact is present, this returns an empty vector
00657         const contactVec& c = m_world->contacts()[handPiece];
00658 
00659         if (c.size() == 0) {
00660             return 0.0;
00661         } else if (m_checkAllObjects) {
00662             return 1.0;
00663         } else {
00664             for (int i = 0; i < m_objects.size(); i++) {
00665                 for (int j = 0; j < c.size(); j++) {
00666                     if (c[j].collide == m_objects[i]) {
00667                         return 1.0;
00668                     }
00669                 }
00670             }
00671 
00672             return 0.0;
00673         }
00674     }
00675 
00676     return 0.0;
00677 }
00678 
00679 // iCubTorsoJointsSensor begin implementation
00680 iCubTorsoJointsSensor::iCubTorsoJointsSensor(ConfigurationParameters &params, QString prefix) :
00681     iCubSensor(params, prefix) {
00682     // Declaring the resources that are needed here
00683     usableResources( QStringList() << icubResource << neuronsIteratorResource );
00684 }
00685 
00686 void iCubTorsoJointsSensor::describe( QString type ) {
00687     iCubSensor::describe( type );
00688     Descriptor d = addTypeDescription( type, "Sensor for reading right or left palm touch sensor" );
00689     
00690 }
00691 
00692 void iCubTorsoJointsSensor::update() {
00693     // Checking all resources we need exist
00694     checkAllNeededResourcesExist();
00695 
00696     // Acquiring the lock to get resources
00697     ResourcesLocker locker( this );
00698 
00699     double minRot, maxRot;
00700     double minFlex, maxFlex;
00701     double curRot;
00702     double curFlex;
00703     
00704     icubMotors->getLimits(0, &minRot, &maxRot);
00705     icubMotors->getLimits(2, &minFlex, &maxFlex);
00706     icubMotors->getEncoder(0, &curRot);
00707     icubMotors->getEncoder(2, &curFlex);
00708 
00709     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00710     evonetIt->setCurrentBlock( name() );
00711     evonetIt->setInput(((curRot - minRot) / (maxRot - minRot)) * 2.0 - 1.0);
00712     evonetIt->nextNeuron();
00713     evonetIt->setInput((curFlex - minFlex) / (maxFlex - minFlex));
00714     
00715 }
00716 
00717 int iCubTorsoJointsSensor::size() {
00718     return 2;
00719 }
00720 
00721 void iCubTorsoJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
00722     iCubSensor::resourceChanged(resourceName, changeType);
00723 
00724     if (changeType == Deleted) {
00725         return;
00726     }
00727 
00728     if (resourceName == icubResource) {
00729         iCubRobot* icub = getResource<iCubRobot>();
00730         icubMotors = icub->torsoController();
00731     } else if (resourceName == neuronsIteratorResource) {
00732         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00733         evonetIt->setCurrentBlock( name() );
00734 
00735         evonetIt->setGraphicProperties( QString("t0"), 0, 1, Qt::red ); //rotation
00736         evonetIt->nextNeuron();
00737         evonetIt->setGraphicProperties( QString("t1"), 0, 1, Qt::red ); //flexion
00738         evonetIt->nextNeuron();
00739     } else {
00740         Logger::info("Unknown resource " + resourceName + " for " + name());
00741     }
00742 }
00743 
// Empty destructor: the motor controller is owned by the robot, nothing to release here
iCubTorsoJointsSensor::~iCubTorsoJointsSensor()
{}
00746 
00747 void iCubTorsoJointsSensor::save(ConfigurationParameters &params, QString prefix)
00748 {
00749     iCubSensor::save( params, prefix );
00750     params.startObjectParameters( prefix, "iCubTorsoJointsSensor", this );
00751     
00752     
00753 }
00754 // iCubTorsoJointsSensor end implementation
00755 
00756 // iCubHeadJointsSensor begin implementation
00757 iCubHeadJointsSensor::iCubHeadJointsSensor(ConfigurationParameters &params, QString prefix) :
00758     iCubSensor(params, prefix) {
00759     // Declaring the resources that are needed here
00760     usableResources( QStringList() << icubResource << neuronsIteratorResource );
00761 }
00762 
// Empty destructor: the motor controller is owned by the robot, nothing to release here
iCubHeadJointsSensor::~iCubHeadJointsSensor()
{}
00765 
00766 void iCubHeadJointsSensor::save(ConfigurationParameters &params, QString prefix)
00767 {
00768     iCubSensor::save( params, prefix );
00769     params.startObjectParameters( prefix, "iCubHeadJointsSensor", this );
00770 }
00771 
00772 void iCubHeadJointsSensor::describe( QString type ) {
00773     iCubSensor::describe( type );
00774     Descriptor d = addTypeDescription( type, "Sensor for reading head sensors" );
00775     
00776 }
00777 
00778 void iCubHeadJointsSensor::update() {
00779     // Checking all resources we need exist
00780     checkAllNeededResourcesExist();
00781 
00782     // Acquiring the lock to get resources
00783     ResourcesLocker locker( this );
00784 
00785     double minRot, maxRot;
00786     double minFlex, maxFlex;
00787     double curRot;
00788     double curFlex;
00789     
00790     icubMotors->getLimits(0, &minRot, &maxRot);
00791     icubMotors->getLimits(2, &minFlex, &maxFlex);
00792     icubMotors->getEncoder(0, &curRot);
00793     icubMotors->getEncoder(2, &curFlex);
00794 
00795     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00796     evonetIt->setCurrentBlock( name() );
00797     evonetIt->setInput(((curRot - minRot) / (maxRot - minRot)) );
00798     evonetIt->nextNeuron();
00799     evonetIt->setInput((curFlex - minFlex) / (maxFlex - minFlex));
00800     
00801 }
00802 
00803 int iCubHeadJointsSensor::size() {
00804     return 2;
00805 }
00806 
00807 void iCubHeadJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
00808     iCubSensor::resourceChanged(resourceName, changeType);
00809 
00810     if (changeType == Deleted) {
00811         return;
00812     }
00813 
00814     if (resourceName == icubResource) {
00815         iCubRobot* icub = getResource<iCubRobot>();
00816         icubMotors = icub->headNeckController();
00817     } else if (resourceName == neuronsIteratorResource) {
00818         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00819         evonetIt->setCurrentBlock( name() );
00820 
00821         evonetIt->setGraphicProperties( QString("n0"), 0, 1, Qt::red ); //n stands for neck
00822         evonetIt->nextNeuron();
00823         evonetIt->setGraphicProperties( QString("n1"), 0, 1, Qt::red );
00824         evonetIt->nextNeuron();
00825     } else {
00826         Logger::info("Unknown resource " + resourceName + " for " + name());
00827     }
00828 }
00829 
00830 // iCubHeadJointsSensor end implementation
00831 
00832 // end iCubHandJointsSensor
00833 iCubHandJointsSensor::iCubHandJointsSensor( ConfigurationParameters& params, QString prefix ) :
00834     iCubSensor(params, prefix),
00835     icubMotors(NULL) {
00836     icubHand = ConfigurationHelper::getString( params, prefix+"hand", "right" );
00837     // Declaring the resources that are needed here
00838     usableResources( QStringList() << icubResource << neuronsIteratorResource );
00839 }
00840 
// Empty destructor: the arm motor controller is owned by the robot
iCubHandJointsSensor::~iCubHandJointsSensor() {
    /* nothing to do */
}
00844 
00845 void iCubHandJointsSensor::save( ConfigurationParameters& params, QString prefix ) {
00846     iCubSensor::save( params, prefix );
00847     params.startObjectParameters( prefix, "iCubHandJointsSensor", this );
00848     params.createParameter( prefix, "hand", icubHand );
00849 }
00850 
00851 void iCubHandJointsSensor::describe( QString type ) {
00852     iCubSensor::describe( type );
00853     Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub hand" );
00854     d.describeEnum( "hand" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The hand from which the joint angles are read" );
00855 }
00856 
00857 void iCubHandJointsSensor::update() {
00858     // Checking all resources we need exist
00859     checkAllNeededResourcesExist();
00860 
00861     // Acquiring the lock to get resources
00862     ResourcesLocker locker( this );
00863 
00864 //  QStringList values;
00865 //  for( int i=9; i<16; i++ ) {
00866 //      double value;
00867 //      icubMotors->getEncoder( i, &value );
00868 //      values << QString::number( value );
00869 //  }
00870 //  exp->setStatus( QString("SENSOR Reading: <")+values.join(", ")+QString(">") );
00871     NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
00872     evonetIt->setCurrentBlock( name() );
00873     for( int i=9; i<16; i++, evonetIt->nextNeuron() ) {
00874         double min, max, value;
00875         icubMotors->getEncoder(i, &value);
00876         icubMotors->getLimits(i,&min,&max);
00877         //normalizziamo i valori dei motori tra 0 ed 1;
00878         evonetIt->setInput( linearMap(value,min,max,0,1) );
00879     }
00880 }
00881 
00882 int iCubHandJointsSensor::size() {
00883     return 7;
00884 }
00885 
00886 void iCubHandJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
00887     iCubSensor::resourceChanged(resourceName, changeType);
00888 
00889     if (changeType == Deleted) {
00890         return;
00891     }
00892 
00893     if (resourceName == icubResource) {
00894         iCubRobot* icub = getResource<iCubRobot>();
00895         if ( icubHand == "right" ) {
00896             icubMotors = icub->rightArmController();
00897         } else {
00898             icubMotors = icub->leftArmController();
00899         }
00900     } else if (resourceName == neuronsIteratorResource) {
00901         QString label;
00902         if ( icubHand == "right" ) {
00903             label="R";
00904         } else {
00905             label="L";
00906         }
00907 
00908         NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
00909         evonetIt->setCurrentBlock( name() );
00910         for( int i=0; i<7; i++, evonetIt->nextNeuron() ) {
00911             evonetIt->setGraphicProperties( label+QString("f")+QString::number(i), 0.0, 1.0, Qt::red ); //f stands for fingers
00912         }
00913     } else {
00914         Logger::info("Unknown resource " + resourceName + " for " + name());
00915     }
00916 }
00917 // end iCubHandJointsSensor
00918 
// Internal helpers shared by the palm and fingertip touch sensors below
namespace __PalmAndFingertipTouchSensor_internal {
    // GLMultMatrix defaults to the single-precision call; define it before
    // this point to override (e.g. glMultMatrixd for double matrices)
    #ifndef GLMultMatrix
    #define GLMultMatrix glMultMatrixf
    // for double glMultMatrixd
    #endif

    // Small offset used to draw sensor surfaces slightly outside the hand
    // geometry, to avoid z-fighting with the underlying solid
    const float epsilon = 0.0001f;

    // Tessellation density: subdivisions used for every 90 degrees of arc
    // when drawing curved sensor surfaces
    const float numDivisionsFor90Degrees = 5.0f;

    // Maximum number of contact points requested from the physics engine
    // for a single pair of colliding objects
    const int maxNumContacts = 20;
    /**
     * \brief Graphical object showing the touch-sensitive area of a fingertip.
     *
     * The object attaches itself to the fingertip cylinder and draws the
     * sensor surface on it; the surface is drawn cyan when inactive and red
     * when the sensor is touched (see setActive()).
     */
    class FingertipTouchSensorGraphic : public GraphicalWObject
    {
    public:
        /**
         * \brief Constructor.
         *
         * \param handPiece the fingertip piece; it must actually be a PhyCylinder
         * \param alpha the angular aperture of the sensitive area (radians)
         * \param h the extension of the sensitive area along the finger axis
         * \param isRight whether the finger belongs to the right hand
         * \param isThumb whether the finger is the thumb
         * \param name the name of this graphical object
         */
        FingertipTouchSensorGraphic(PhyObject *handPiece, double alpha, double h, bool isRight, bool isThumb, QString name = "unamed") :
            GraphicalWObject(handPiece->world(), name),
            m_handPiece(dynamic_cast<PhyCylinder*>(handPiece)),
            m_alpha(alpha),
            m_h(h),
            m_alphaOffset(computeAlphaOffset(isRight, isThumb)),
            // The aperture is tessellated in arcs of at most 90°/numDivisionsFor90Degrees each
            m_angularIncrement(m_alpha / (ceil(m_alpha / (M_PI / 2.0)) * numDivisionsFor90Degrees)),
            m_startingAngle((-m_alpha / 2.0) + m_alphaOffset),
            m_endingAngle((m_alpha / 2.0) + m_alphaOffset),
            m_isActive(false),
            m_isActiveMutex()
        {
            // Attaching to handPiece (which also becomes our owner)
            attachToObject(m_handPiece, true);

            // We also use our own color and texture
            setUseColorTextureOfOwner(false);
            setTexture("");
            setColor(Qt::cyan);
        }

        /**
         * \brief Destructor (nothing to release: we are owned by the hand piece).
         */
        ~FingertipTouchSensorGraphic()
        {
        }

        /**
         * \brief Sets whether the sensor is currently touched.
         *
         * The flag is mutex-protected because render() also reads it —
         * presumably from a different (rendering) thread.
         * \param isActive true if the sensor is in contact with an object
         */
        void setActive(bool isActive)
        {
            m_isActiveMutex.lock();
                m_isActive = isActive;
            m_isActiveMutex.unlock();
        }

    protected:
        /**
         * \brief Draws the sensor area on the fingertip.
         *
         * The area is drawn red when active, cyan otherwise, and is offset
         * by epsilon from the cylinder surface to avoid z-fighting.
         */
        virtual void render(RenderWObject* renderer, QGLContext* gw)
        {
            // First of all changing our color depending on the value of m_isActive
            m_isActiveMutex.lock();
                if (m_isActive) {
                    setColor(Qt::red);
                } else {
                    setColor(Qt::cyan);
                }
            m_isActiveMutex.unlock();

            // Bringing the coordinate system on the fingerip
            wMatrix mtr = tm;
            mtr.w_pos += mtr.x_ax.scale(m_handPiece->height() / 2.0);

            glPushMatrix();
            renderer->container()->setupColorTexture(gw, renderer);
            GLMultMatrix(&mtr[0][0]);

            // Drawing the top part of the sensor
            glBegin(GL_TRIANGLES);

            // All normals here are along the x axis. All triangles have a vertex on
            // the axis of the cylinder
            const float adjustedRadius = m_handPiece->radius() + epsilon;
            for (float angle = m_startingAngle; angle < m_endingAngle; angle += m_angularIncrement) {
                // Computing the next angle (we have to do this to avoid numerical errors)
                const float nextAngle = angle + m_angularIncrement;
                const float effectiveNextAngle = ((nextAngle > m_endingAngle) ? m_endingAngle : nextAngle);
                glNormal3f(1.0, 0.0, 0.0);
                glVertex3f(epsilon, 0.0, 0.0);
                glVertex3f(epsilon, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
                glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
            }
            glEnd();

            // Now drawing the remaining part
            glBegin(GL_QUADS);

            // Here we have to compute the right normal for each face
            for (float angle = m_startingAngle; angle < m_endingAngle; angle += m_angularIncrement) {
                // Computing the next angle (we have to do this to avoid numerical errors)
                const float nextAngle = angle + m_angularIncrement;
                const float effectiveNextAngle = ((nextAngle > m_endingAngle) ? m_endingAngle : nextAngle);
                // To compute the normal we take two vectors along two adiacent sides of the quad, compute the cross
                // product and then normalize it (the product order is important, of course)
                // NOTE(review): v1 uses (angle + m_angularIncrement) instead of
                // effectiveNextAngle, so the normal of the last (clamped) quad may
                // be slightly off — confirm whether this is intentional
                const wVector v1(0.0, sin(angle) - sin(angle + m_angularIncrement), cos(angle) - cos(angle + m_angularIncrement));
                const wVector v2(1.0, 0.0, 0.0);
                const wVector normal = (v1 * v2).normalize();
                glNormal3f(normal.x, normal.y, normal.z);

                glVertex3f(epsilon, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
                glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
                glVertex3f(-m_h, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
                glVertex3f(-m_h, adjustedRadius * sin(angle), adjustedRadius * cos(angle));
            }
            glEnd();
            glPopMatrix();
        }

        /**
         * \brief Returns the angular offset of the sensitive area.
         *
         * The drawing angle is measured from the z axis on the yz plane; the
         * offset depends on the hand and on whether the piece is the thumb.
         */
        static float computeAlphaOffset(bool isRight, bool isThumb) {
            // The angle we use when drawing is respect to the z axis on the yz plane. Here we compute
            // the offsets needed in different cases (see the comment in the function
            // iCubFingertipsTouchSensor::goodCollisionPoint for more information about the frame of
            // references of the various fingers in the two hands)
            float offset = 0.0;
            if (isRight) {
                offset = isThumb ? M_PI / 2.0 : 0.0;
            } else {
                offset = isThumb ? M_PI / 2.0 : M_PI;
            }

            return offset;
        }

        // The fingertip we are attached to (NULL if handPiece was not a PhyCylinder)
        PhyCylinder *const m_handPiece;

        // Angular aperture of the sensitive area (radians)
        const float m_alpha;

        // Extension of the sensitive area along the finger axis
        const float m_h;

        // Angular offset of the area (depends on hand/thumb, see computeAlphaOffset)
        const float m_alphaOffset;

        // Angular step used when tessellating the area
        const float m_angularIncrement;

        // First angle of the area: m_alphaOffset - m_alpha / 2
        const float m_startingAngle;

        // Last angle of the area: m_alphaOffset + m_alpha / 2
        const float m_endingAngle;

        // Whether the sensor is currently touched (guarded by m_isActiveMutex)
        bool m_isActive;

        // Protects m_isActive against concurrent setActive()/render() access
        QMutex m_isActiveMutex;
    };
01159 
01164     class PalmPatchesTouchSensorGraphic : public GraphicalWObject
01165     {
01166     public:
01186         PalmPatchesTouchSensorGraphic(PhyObject *handPalm, const QVector<iCubPalmPatchesTouchSensor::Triangle>& patches, bool isRight, QString name = "unamed") :
01187             GraphicalWObject(handPalm->world(), name),
01188             m_handPalm(dynamic_cast<PhyBox*>(handPalm)),
01189             m_patches(patches),
01190             m_isRight(isRight),
01191             m_zAxisDirection(isRight ? 1.0 : -1.0),
01192             m_activations(m_patches.size(), false),
01193             m_activationsMutex()
01194         {
01195             // Attaching to handPalm (which also becomes our owner)
01196             attachToObject(m_handPalm, true);
01197 
01198             // We also use our own color and texture
01199             setUseColorTextureOfOwner(false);
01200             setTexture("");
01201             setColor(Qt::cyan);
01202         }
01203 
01207         ~PalmPatchesTouchSensorGraphic()
01208         {
01209         }
01210 
01217         void setActivations(const QVector<bool> activations)
01218         {
01219             m_activationsMutex.lock();
01220                 m_activations = activations;
01221             m_activationsMutex.unlock();
01222         }
01223 
01224     protected:
01233         virtual void render(RenderWObject* renderer, QGLContext* gw)
01234         {
01235             // Copying the m_activations vector to a local vector to avoid concurrent accesses
01236             m_activationsMutex.lock();
01237                 const QVector<bool> activations(m_activations);
01238             m_activationsMutex.unlock();
01239 
01240             // We receive the list of triangles from the sensor, we just need to display them
01241             glPushMatrix();
01242             renderer->container()->setupColorTexture(gw, renderer);
01243             GLMultMatrix(&tm[0][0]);
01244 
01245             // First drawing the triangles making up the sensor
01246             glBegin(GL_TRIANGLES);
01247             glNormal3f(0.0, 0.0, m_zAxisDirection);
01248             for (int i = 0; i < m_patches.size(); i++) {
01249                 const iCubPalmPatchesTouchSensor::Triangle& t = m_patches[i];
01250 
01251                 QColor col;
01252                 if (activations[i]) {
01253                     col = Qt::red;
01254                 } else {
01255                     col = Qt::cyan;
01256                 }
01257                 glColor4f(col.redF(), col.greenF(), col.blueF(), col.alphaF());
01258 
01259                 glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
01260                 glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
01261                 glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
01262             }
01263             glEnd();
01264 
01265             // Now drawing the lines separating the triangles. Using the for we draw some line twice,
01266             // it's not a big problem
01267             glBegin(GL_LINES);
01268             glNormal3f(0.0, 0.0, m_zAxisDirection);
01269             glColor4f(0.0, 0.0, 0.0, 1.0);
01270             for (int i = 0; i < m_patches.size(); i++) {
01271                 const iCubPalmPatchesTouchSensor::Triangle& t = m_patches[i];
01272 
01273                 glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
01274                 glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
01275 
01276                 glVertex3f(t.b.x, t.b.y, t.b.z + m_zAxisDirection * epsilon);
01277                 glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
01278 
01279                 glVertex3f(t.c.x, t.c.y, t.c.z + m_zAxisDirection * epsilon);
01280                 glVertex3f(t.a.x, t.a.y, t.a.z + m_zAxisDirection * epsilon);
01281             }
01282             glEnd();
01283             glPopMatrix();
01284         }
01285 
01290         PhyBox *const m_handPalm;
01291 
01295         const QVector<iCubPalmPatchesTouchSensor::Triangle> m_patches;
01296 
01301         const bool m_isRight;
01302 
01310         const float m_zAxisDirection;
01311 
01319         QVector<bool> m_activations;
01320 
01327         QMutex m_activationsMutex;
01328     };
01329 }
01330 
01331 using namespace __PalmAndFingertipTouchSensor_internal;
01332 
01333 iCubFingertipsTouchSensor::iCubFingertipsTouchSensor(ConfigurationParameters& params, QString prefix) :
01334     iCubSensor(params, prefix),
01335     m_icubHand("right"),
01336     m_isRight(true),
01337     m_checkAllObjects(true),
01338     m_alpha(M_PI / 4.0), // 45°
01339     m_h(0.01),
01340     m_drawSensor(true),
01341     m_world(NULL),
01342     m_icubArm(),
01343     m_objects(NULL),
01344     m_icub(NULL),
01345     m_graphicalTouchSensors()
01346 {
01347     m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", m_icubHand);
01348     if (m_icubHand.toLower() == "right") {
01349         m_isRight = true;
01350     } else if (m_icubHand.toLower() == "left") {
01351         m_isRight = false;
01352     } else {
01353         ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
01354     }
01355     m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
01356     m_alpha = toRad(ConfigurationHelper::getDouble(params, prefix + "alpha", toDegree(m_alpha)));
01357     m_h = ConfigurationHelper::getDouble(params, prefix + "h", m_h);
01358     m_drawSensor = ConfigurationHelper::getBool(params, prefix + "drawSensor", m_drawSensor);
01359 
01360     // Declaring the resources that are needed here
01361     usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
01362 }
01363 
// Empty destructor: the graphical sensor objects are owned by the finger
// pieces they are attached to, so they are destroyed by their owners
iCubFingertipsTouchSensor::~iCubFingertipsTouchSensor()
{
    // Nothing to do here, renderers are destroyed by their owners
}
01368 
01369 void iCubFingertipsTouchSensor::save(ConfigurationParameters& params, QString prefix)
01370 {
01371     iCubSensor::save(params, prefix);
01372     params.startObjectParameters(prefix, "iCubFingertipsTouchSensor", this);
01373     params.createParameter(prefix, "hand", m_icubHand);
01374     params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
01375     params.createParameter(prefix, "alpha", QString::number(toDegree(m_alpha)));
01376     params.createParameter(prefix, "h", QString::number(m_h));
01377     params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
01378 }
01379 
01380 void iCubFingertipsTouchSensor::describe(QString type)
01381 {
01382     iCubSensor::describe(type);
01383     Descriptor d = addTypeDescription(type, "Hand fingertips touch sensor", "The touch sensor of the iCub fingertips. There are five sensors, one for each figertip");
01384     d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
01385     d.describeBool("checkAllObjects").def(true).help("Wheter to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
01386     d.describeReal("alpha").def(45.0).help("The aperture of the sensor surface", "The aperture angle of the sensor surface in degrees (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
01387     d.describeReal("h").def(0.01).help("The height of the sensor surface", "The height of the sensor surface (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
01388     d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true areas corresponding to the touch sensor surface are drawn on the fingertips when doing graphical simulations");
01389 }
01390 
01391 void iCubFingertipsTouchSensor::update()
01392 {
01393     // Checking all resources we need exist
01394     checkAllNeededResourcesExist();
01395 
01396     // Acquiring the lock to get resources
01397     ResourcesLocker locker(this);
01398 
01399     EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
01400     evonetIt->setCurrentBlock(name());
01401 
01402     // These are the indexes of fingertips in the vector of icub arm parts
01403     static const unsigned int indexes[] = {19, 20, 21, 22, 26};
01404     for (unsigned int i = 0; i < 5; i++) {
01405         // The thumb is the last index (26)
01406         const double collision = handPieceCollides(m_icubArm[indexes[i]], (i == 4) ? true : false);
01407         // If sensors are also drawn, we change the color of the sensor depending on whether it
01408         // collides with an object or not
01409         if (m_drawSensor) {
01410             m_graphicalTouchSensors[i]->setActive((collision > 0.5));
01411         }
01412         evonetIt->setInput(collision);
01413         evonetIt->nextNeuron();
01414     }
01415 }
01416 
01417 int iCubFingertipsTouchSensor::size()
01418 {
01419     return 5;
01420 }
01421 
01422 void iCubFingertipsTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
01423 {
01424     iCubSensor::resourceChanged(resourceName, changeType);
01425 
01426     if (changeType == Deleted) {
01427         return;
01428     }
01429 
01430     if (resourceName == icubResource) {
01431         m_icub = getResource<iCubRobot>();
01432         if (m_isRight) {
01433             m_icubArm = m_icub->rightArm();
01434         } else {
01435             m_icubArm = m_icub->leftArm();
01436         }
01437 
01438         // Checking if we have to draw the sensors. This is here because it requires a pointer to icub parts
01439         if (m_drawSensor) {
01440             m_graphicalTouchSensors.clear();
01441 
01442             // Creating graphical objects representing the touch sensors areas. They will set the finger piece as
01443             // their owner so that the destruction of the objects is handled by them
01444             m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[19], m_alpha, m_h, m_isRight, false)); // Index
01445             m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[20], m_alpha, m_h, m_isRight, false)); // Middle
01446             m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[21], m_alpha, m_h, m_isRight, false)); // Ring
01447             m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[22], m_alpha, m_h, m_isRight, false)); // Little
01448             m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[26], m_alpha, m_h, m_isRight, true)); // Thumb
01449         }
01450     } else if (resourceName == neuronsIteratorResource) {
01451         QString lbl;
01452         if (m_isRight) {
01453             lbl = "R";
01454         } else {
01455             lbl = "L";
01456         }
01457 
01458         EvonetIterator* evonetIt = getResource<EvonetIterator>();
01459         evonetIt->setCurrentBlock(name());
01460         evonetIt->setGraphicProperties(lbl + "f1", 0.0, 1.0, Qt::red);
01461         evonetIt->nextNeuron();
01462         evonetIt->setGraphicProperties(lbl + "f2", 0.0, 1.0, Qt::red);
01463         evonetIt->nextNeuron();
01464         evonetIt->setGraphicProperties(lbl + "f3", 0.0, 1.0, Qt::red);
01465         evonetIt->nextNeuron();
01466         evonetIt->setGraphicProperties(lbl + "f4", 0.0, 1.0, Qt::red);
01467         evonetIt->nextNeuron();
01468         evonetIt->setGraphicProperties(lbl + "f5", 0.0, 1.0, Qt::red);
01469         evonetIt->nextNeuron();
01470     } else if (resourceName == "objects") {
01471         m_objects = getResource<QVector<WObject*> >();
01472     } else if (resourceName == "world") {
01473         m_world = getResource<World>();
01474     } else {
01475         Logger::info("Unknown resource " + resourceName + " for " + name());
01476     }
01477 }
01478 
01479 double iCubFingertipsTouchSensor::handPieceCollides(PhyObject* handPiece, bool isThumb) const
01480 {
01481     if (m_icub->isKinematic() || !m_checkAllObjects) {
01482         for (int i = 0; i < m_objects->size(); i++) {
01483             PhyObject* obj = dynamic_cast<PhyObject*>(m_objects->at(i));
01484             QVector<wVector> contacts;
01485             if ((obj != NULL) && (m_world->smartCheckContacts(handPiece, (PhyObject*) m_objects->at(i), maxNumContacts, &contacts))) {
01486                 for (int j = 0; j < contacts.size(); j++) {
01487                     if (goodCollisionPoint(handPiece, contacts[j], isThumb)) {
01488                         return 1.0;
01489                     }
01490                 }
01491             }
01492         }
01493 
01494         return 0.0;
01495     } else {
01496         // Taking the vector of contacts. If no contact is present, this returns an empty vector
01497         const contactVec& c = m_world->contacts()[handPiece];
01498 
01499         for (int i = 0; i < c.size(); i++) {
01500             if (goodCollisionPoint(handPiece, c[i].pos, isThumb)) {
01501                 return 1.0;
01502             }
01503         }
01504 
01505         return 0.0;
01506     }
01507 
01508     return 0.0;
01509 }
01510 
01511 bool iCubFingertipsTouchSensor::goodCollisionPoint(PhyObject* handPiece, const wVector& collisionPoint, bool isThumb) const
01512 {
01513     // The various fingertips have frame of references with different orientations, so the direction towards
01514     // the palm (i.e. where the touch sensor area lies) is along different axes:
01515     //  - right hand:
01516     //      - thumb: +y axis
01517     //      - all other fingers: +z axis
01518     //  - left hand:
01519     //      - thumb: +y axis
01520     //      - all other fingers: -z axis
01521     // Here we calculate the angle on the yz plane, but the 0 angle is on different axes depending on the
01522     // hand piece, as in the list above
01523 
01524     float angle;
01525     if (isThumb) {
01526         angle = atan2(collisionPoint.z, collisionPoint.y);
01527     } else {
01528         if (m_isRight) {
01529             angle = atan2(collisionPoint.y, collisionPoint.z);
01530         } else {
01531             angle = atan2(collisionPoint.y, -collisionPoint.z);
01532         }
01533     }
01534     // Also computing the distance from the fingertip (to ease the check below)
01535     const float distFromFingertip = (dynamic_cast<PhyCylinder*>(handPiece))->height() / 2.0 - collisionPoint.x;
01536 
01537     // Checking if the collision point is good
01538     if ((angle >= -m_alpha) && (angle <= m_alpha) && (distFromFingertip <= m_h)) {
01539         return true;
01540     }
01541 
01542     return false;
01543 }
01544 
01545 iCubPalmPatchesTouchSensor::iCubPalmPatchesTouchSensor(ConfigurationParameters& params, QString prefix) :
01546     iCubSensor(params, prefix),
01547     m_icubHand("right"),
01548     m_isRight(true),
01549     m_checkAllObjects(true),
01550     m_drawSensor(true),
01551     m_world(NULL),
01552     m_icubArm(),
01553     m_objects(NULL),
01554     m_icub(NULL),
01555     m_handPalm(NULL),
01556     m_patches(),
01557     m_graphicalTouchSensor(NULL)
01558 {
01559     m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", m_icubHand);
01560     if (m_icubHand.toLower() == "right") {
01561         m_isRight = true;
01562     } else if (m_icubHand.toLower() == "left") {
01563         m_isRight = false;
01564     } else {
01565         ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
01566     }
01567     m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
01568     m_drawSensor = ConfigurationHelper::getBool(params, prefix + "drawSensor", m_drawSensor);
01569 
01570     // Declaring the resources that are needed here
01571     usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
01572 }
01573 
// Empty destructor: the graphical sensor object is owned by the palm piece
// it is attached to, so it is destroyed by its owner
iCubPalmPatchesTouchSensor::~iCubPalmPatchesTouchSensor()
{
    // Nothing to do here, the renderer is destroyed by its owners
}
01578 
01579 void iCubPalmPatchesTouchSensor::save(ConfigurationParameters& params, QString prefix)
01580 {
01581     iCubSensor::save(params, prefix);
01582     params.startObjectParameters(prefix, "iCubPalmPatchesTouchSensor", this);
01583     params.createParameter(prefix, "hand", m_icubHand);
01584     params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
01585     params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
01586 }
01587 
01588 void iCubPalmPatchesTouchSensor::describe(QString type)
01589 {
01590     iCubSensor::describe(type);
01591     Descriptor d = addTypeDescription(type, "Hand palm touch sensor", "The touch sensor of the iCub hand palm. There are four sensors, roughly in the same positions of the four patches on the real iCub hand");
01592     d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
01593     d.describeBool("checkAllObjects").def(true).help("Wheter to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
01594     d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true areas corresponding to the touch sensor surface are drawn on the fingertips when doing graphical simulations");
01595 }
01596 
01597 void iCubPalmPatchesTouchSensor::update()
01598 {
01599     // Checking all resources we need exist
01600     checkAllNeededResourcesExist();
01601 
01602     // Acquiring the lock to get resources
01603     ResourcesLocker locker(this);
01604 
01605     EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
01606     evonetIt->setCurrentBlock(name());
01607 
01608     // First of all we have to get the list of collision points
01609     QVector<wVector> contacts;
01610     if (m_icub->isKinematic() || !m_checkAllObjects) {
01611         // Checking contacts with objects in the list. Appending all contacts to the contacts vector
01612         for (int i = 0; i < m_objects->size(); i++) {
01613             PhyObject* obj = dynamic_cast<PhyObject*>(m_objects->at(i));
01614             QVector<wVector> contactsWithObj;
01615             if (obj != NULL) {
01616                 m_world->smartCheckContacts(m_handPalm, (PhyObject*) m_objects->at(i), maxNumContacts, &contactsWithObj);
01617                 contacts << contactsWithObj;
01618             }
01619         }
01620     } else {
01621         // Taking the vector of contacts. If no contact is present, this returns an empty vector
01622         const contactVec& c = m_world->contacts()[m_handPalm];
01623 
01624         for (int i = 0; i < c.size(); i++) {
01625             contacts.append(c[i].pos);
01626         }
01627     }
01628 
01629     // Now we have to check each contact point for each triangle. We also save activations into a QVector
01630     // if the sensor is drawn
01631     QVector<bool> activations;
01632     if (m_drawSensor) {
01633         activations.fill(false, m_patches.size());
01634     }
01635     for (int i = 0; i < m_patches.size(); i++) {
01636         float activation = 0.0;
01637         for (int j = 0; j < contacts.size(); j++) {
01638             if (pointInPalmTriangle(contacts[j], m_patches[i])) {
01639                 activation = 1.0;
01640                 if (m_drawSensor) {
01641                     activations[i] = true;
01642                 }
01643                 break;
01644             }
01645         }
01646         evonetIt->setInput(activation);
01647         evonetIt->nextNeuron();
01648     }
01649     if (m_drawSensor) {
01650         m_graphicalTouchSensor->setActivations(activations);
01651     }
01652 }
01653 
01654 int iCubPalmPatchesTouchSensor::size()
01655 {
01656     // The number of patches is always 4
01657     return 4;
01658 }
01659 
void iCubPalmPatchesTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
{
    // Caches pointers to the resources this sensor uses and (re)builds the
    // four triangular patches on the palm whenever the robot changes
    iCubSensor::resourceChanged(resourceName, changeType);

    // On deletion the base class handles cleanup; nothing to cache
    if (changeType == Deleted) {
        return;
    }

    if (resourceName == icubResource) {
        m_icub = getResource<iCubRobot>();
        if (m_isRight) {
            m_icubArm = m_icub->rightArm();
        } else {
            m_icubArm = m_icub->leftArm();
        }
        // The palm is the element at index 6 of the arm chain.
        // NOTE(review): m_handPalm is NULL if that element is not a PhyBox —
        // the code below would then dereference NULL; presumably the iCub
        // model guarantees a PhyBox here — TODO confirm
        m_handPalm = dynamic_cast<PhyBox*>(m_icubArm[6]);

        // Creating the list of triangles. The palmDirection constant is needed because the right and
        // left hand have different frame of references: in the right hand the palm is towards +z, while
        // in the left hand it is towards -z. The patches center is not in the middle of the palm along
        // the y axis, it is slightly moved towards the side opposite to the thumb
        const float palmDirection = m_isRight ? +1.0 : -1.0;
        // Equilateral triangles whose side is half the palm width; the height
        // follows from Pythagoras on half the side
        const float triangleSide = m_handPalm->sideX() / 2.0f;
        const float triangleHalfSide = triangleSide / 2.0f;
        const float triangleHeight = sqrt((triangleSide * triangleSide) - (triangleHalfSide * triangleHalfSide));
        const float palmZ = palmDirection * m_handPalm->sideZ() / 2.0;
        const float palmCenterY = (m_handPalm->sideY() / 2.0) - triangleHeight * 1.1;
        Triangle t;
        m_patches.clear();

        // Patch 1: below the shared edge (towards smaller y)
        t.a = wVector(0.0, palmCenterY, palmZ);
        t.b = wVector(triangleSide, palmCenterY, palmZ);
        t.c = wVector(triangleHalfSide, palmCenterY - triangleHeight, palmZ);
        m_patches.append(t);

        // Patch 2: above the same edge (towards larger y)
        t.a = wVector(0.0, palmCenterY, palmZ);
        t.b = wVector(triangleSide, palmCenterY, palmZ);
        t.c = wVector(triangleHalfSide, palmCenterY + triangleHeight, palmZ);
        m_patches.append(t);

        // Patch 3: upper triangle straddling x = 0
        t.a = wVector(0.0, palmCenterY, palmZ);
        t.b = wVector(triangleHalfSide, palmCenterY + triangleHeight, palmZ);
        t.c = wVector(-triangleHalfSide, palmCenterY + triangleHeight, palmZ);
        m_patches.append(t);

        // Patch 4: mirror of patch 2 on the negative-x side
        t.a = wVector(0.0, palmCenterY, palmZ);
        t.b = wVector(-triangleSide, palmCenterY, palmZ);
        t.c = wVector(-triangleHalfSide, palmCenterY + triangleHeight, palmZ);
        m_patches.append(t);

        if (m_drawSensor) {
            // NOTE(review): the previous graphical object (if any) is not
            // deleted here; per the destructor comment the renderer is owned
            // elsewhere — verify owners also handle this reassignment
            m_graphicalTouchSensor = new PalmPatchesTouchSensorGraphic(m_handPalm, m_patches, m_isRight);
        }
    } else if (resourceName == neuronsIteratorResource) {
        // Label the four input neurons "Rp1".."Rp4" or "Lp1".."Lp4"
        QString lbl;
        if (m_isRight) {
            lbl = "R";
        } else {
            lbl = "L";
        }

        EvonetIterator* evonetIt = getResource<EvonetIterator>();
        evonetIt->setCurrentBlock(name());
        evonetIt->setGraphicProperties(lbl + "p1", 0.0, 1.0, Qt::red);
        evonetIt->nextNeuron();
        evonetIt->setGraphicProperties(lbl + "p2", 0.0, 1.0, Qt::red);
        evonetIt->nextNeuron();
        evonetIt->setGraphicProperties(lbl + "p3", 0.0, 1.0, Qt::red);
        evonetIt->nextNeuron();
        evonetIt->setGraphicProperties(lbl + "p4", 0.0, 1.0, Qt::red);
        evonetIt->nextNeuron();
    } else if (resourceName == "objects") {
        m_objects = getResource<QVector<WObject*> >();
    } else if (resourceName == "world") {
        m_world = getResource<World>();
    } else {
        Logger::info("Unknown resource " + resourceName + " for " + name());
    }
}
01739 
01740 bool iCubPalmPatchesTouchSensor::pointInPalmTriangle(const wVector& point, const Triangle& triangle) const
01741 {
01742     // Checking that the point is on the palm side of the hand
01743     if (((m_isRight) && (point.z < 0.0)) || ((!m_isRight) && (point.z > 0.0))) {
01744         return false;
01745     }
01746 
01747     // The algorithm used here uses Barycentric Coordinates to check if a point is inside a triangle or not.
01748     // You can find mode information at the following links:
01749     //  http://en.wikipedia.org/wiki/Barycentric_coordinates_(mathematics)
01750     //  http://www.blackpawn.com/texts/pointinpoly/default.html
01751     // The version implemented here is directly taken from the second link (an offline version is in the
01752     // documentation). We discard the z coordinate (and do computations in 2D) because the check on z has
01753     // already been done before
01754 
01755     // Compute vectors
01756     const float v0x = triangle.c.x - triangle.a.x;
01757     const float v0y = triangle.c.y - triangle.a.y;
01758     const float v1x = triangle.b.x - triangle.a.x;
01759     const float v1y = triangle.b.y - triangle.a.y;
01760     const float v2x = point.x - triangle.a.x;
01761     const float v2y = point.y - triangle.a.y;
01762 
01763     // Compute dot products
01764     const float dot00 = v0x * v0x + v0y * v0y;
01765     const float dot01 = v0x * v1x + v0y * v1y;
01766     const float dot02 = v0x * v2x + v0y * v2y;
01767     const float dot11 = v1x * v1x + v1y * v1y;
01768     const float dot12 = v1x * v2x + v1y * v2y;
01769 
01770     // Compute barycentric coordinates
01771     const float invDenom = 1.0 / (dot00 * dot11 - dot01 * dot01);
01772     const float u = (dot11 * dot02 - dot01 * dot12) * invDenom;
01773     const float v = (dot00 * dot12 - dot01 * dot02) * invDenom;
01774 
01775     // Check if point is in triangle
01776     return (u >= 0) && (v >= 0) && (u + v < 1);
01777 }
01778 
01779 HandObjectVisualOffsetSensor::HandObjectVisualOffsetSensor(ConfigurationParameters& params, QString prefix) :
01780     iCubSensor(params, prefix),
01781     m_icubHand("right"),
01782     m_world(NULL),
01783     m_eye(NULL),
01784     m_hand(NULL),
01785     m_object(NULL)
01786 {
01787     m_icubHand = ConfigurationHelper::getString(params, prefix + "hand", "right");
01788 
01789     // Declaring the resources that are needed here
01790     usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
01791 }
01792 
HandObjectVisualOffsetSensor::~HandObjectVisualOffsetSensor()
{
    // Nothing to do here: all pointer members are non-owning references to
    // resources owned elsewhere
}
01796 
01797 void HandObjectVisualOffsetSensor::save(ConfigurationParameters& params, QString prefix)
01798 {
01799     iCubSensor::save(params, prefix);
01800 
01801     params.startObjectParameters(prefix, "HandObjectVisualOffsetSensor", this);
01802     params.createParameter(prefix, "hand", m_icubHand);
01803 }
01804 
01805 void HandObjectVisualOffsetSensor::describe(QString type)
01806 {
01807     iCubSensor::describe(type);
01808     Descriptor d = addTypeDescription(type, "Visual offset between the hand and the object", "This sensor computes the distance between the hand and the first object in the visual field of the robot. Returns the distances on the vertical and horizontal axes");
01809     d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
01810 }
01811 
01812 void HandObjectVisualOffsetSensor::update()
01813 {
01814     // Checking all resources we need exist
01815     checkAllNeededResourcesExist();
01816 
01817     // Acquiring the lock to get resources
01818     ResourcesLocker locker(this);
01819 
01820     // We get this here because we are not notified if the vector changes (i.e. elemets are added or deleted),
01821     // only if the vector is replaced with another vector (and that doesn't happend)
01822     m_object = (*(getResource<QVector<WObject*> >("objects")))[0];
01823 
01824     // Setting the eye matrix in the projector
01825     Projector projector;
01826     projector.setEyeMatrix(m_eye->matrix());
01827 
01828     // Computing the projection of the object on the retina
01829     projector.set3DPointWorld(m_object->matrix().w_pos);
01830     const ImagePoint objPos = projector.getImagePoint01();
01831 
01832     // Computing the projection of the hand on the retina
01833     projector.set3DPointWorld(m_hand->matrix().w_pos);
01834     const ImagePoint handPos = projector.getImagePoint01();
01835 
01836     double dx, dy;
01837     if(objPos.isValid() && handPos.isValid()) {
01838         dx = objPos.x - handPos.x;
01839         dx = tanh(5*dx);
01840 
01841         dy = objPos.y - handPos.y;
01842         dy = tanh(5*dx);
01843     } else {
01844         dx = 0;
01845         dy = 0;
01846     }
01847 
01848     EvonetIterator* evonetIt = getResource<EvonetIterator>(neuronsIteratorResource);
01849     evonetIt->setCurrentBlock(name());
01850     evonetIt->setInput(dx);
01851     evonetIt->nextNeuron();
01852     evonetIt->setInput(dy);
01853 }
01854 
01855 int HandObjectVisualOffsetSensor::size()
01856 {
01857     return 2;
01858 }
01859 
01860 void HandObjectVisualOffsetSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
01861 {
01862     iCubSensor::resourceChanged(resourceName, changeType);
01863 
01864     if (changeType == Deleted) {
01865         return;
01866     }
01867 
01868     if (resourceName == icubResource) {
01869         iCubRobot* icub = getResource<iCubRobot>();
01870         m_eye = icub->headNeck()[4];
01871         if (m_icubHand == "left") {
01872             m_hand = icub->leftArm()[6];
01873         } else {
01874             m_hand = icub->rightArm()[6];
01875         }
01876     } else if (resourceName == neuronsIteratorResource) {
01877         EvonetIterator* evonetIt = getResource<EvonetIterator>();
01878         evonetIt->setCurrentBlock(name());
01879         evonetIt->setGraphicProperties("dx", -1.0, 1.0, Qt::red);
01880         evonetIt->nextNeuron();
01881         evonetIt->setGraphicProperties("dy", -1.0, 1.0, Qt::red);
01882     } else if (resourceName == "objects") {
01883         // Nothing to do here, we get objects with getResource() in update()
01884     } else if (resourceName == "world") {
01885         m_world = getResource<World>();
01886     } else {
01887         Logger::info("Unknown resource " + resourceName + " for " + name());
01888     }
01889 }
01890 
01891 } // end namespace farsa
01892 
01893 #endif