icubsensors.cpp
42 neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", neuronsIteratorResource);
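// A minimal sketch (not code from this file) of how the two resource-name parameters
// described just below could be read, using only the ConfigurationHelper::getString()
// call visible at line 42; readResourceNames() and its local variables are illustrative.
void readResourceNames(ConfigurationParameters& params, QString prefix)
{
	// Defaults, kept unless the corresponding parameter is present in the configuration
	QString icubResource = "robot";
	QString neuronsIteratorResource = "neuronsIterator";

	icubResource = ConfigurationHelper::getString(params, prefix + "icub", icubResource);
	neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", neuronsIteratorResource);
}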
71 d.describeString("icub").def("robot").help("the name of the resource associated with the iCub robot to use (default is \"robot\")");
72 d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
108 Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub arm" );
109 d.describeEnum( "arm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The arm from which the joint angles are read" );
141 void iCubArmJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
305 iCubPalmTargetDistSensor::iCubPalmTargetDistSensor(ConfigurationParameters &params, QString prefix) :
318 Logger::warning( QString("iCubPalmTargetDistSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
329 Logger::warning( QString("iCubPalmTargetDistSensor %1 - palmOffset parameter is not well specified; it will be ignored").arg(name()) );
348 params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
349 params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
355 Descriptor d = addTypeDescription( type, "Sensor for reading the distance between the right or left palm and a specified target" );
356 d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the distance to the target is computed" );
357 d.describeString( "target" ).def( "target" ).help( "The name of the resource associated with the target object" );
358 d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used to linearize the object position into [-1,1]" );
359 d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used to linearize the object position into [-1,1]" );
360 d.describeReal( "palmOffset" ).props( IsList ).help( "The offset with respect to the palm at which the distance is computed" );
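// A minimal sketch (not code from this file) of the [-1,1] linearization that the
// bbMin/bbMax help strings above refer to; linearizeAxis() is a hypothetical helper,
// applied per axis to the palm-target offset, and the clamping is an assumption.
inline double linearizeAxis(double value, double min, double max)
{
	double t = (value - min) / (max - min); // map [min, max] onto [0, 1]
	if (t < 0.0) t = 0.0;                   // clamp values outside the bounding box
	if (t > 1.0) t = 1.0;
	return 2.0 * t - 1.0;                   // rescale onto [-1, 1]
}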
406 void iCubPalmTargetDistSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
446 usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
451 Descriptor d = addTypeDescription( type, "Sensor for reading the right or left palm touch sensor" );
452 d.describeEnum( "palm" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The palm from which the touch sensor is read" );
495 void iCubPalmTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
543 usableResources( QStringList() << icubResource << "world" << "objects" << neuronsIteratorResource );
556 params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
562 Descriptor d = addTypeDescription(type, "Hand touch sensor", "The touch sensor of the iCub hand. There are six sensors: one on the palm and one for each fingertip");
563 d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose touch sensors are read. Choose between \"right\" and \"left\"");
564 d.describeBool("checkAllObjects").def(true).help("Whether to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
688 Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of the iCub torso" );
721 void iCubTorsoJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
807 void iCubHeadJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
853 Descriptor d = addTypeDescription( type, "Sensor for reading the joint angles of an iCub hand" );
854 d.describeEnum( "hand" ).def( "right" ).values( QStringList()<<"right"<<"left" ).props( IsMandatory ).help( "The hand from which the joint angles are read" );
886 void iCubHandJointsSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
911 evonetIt->setGraphicProperties( label+QString("f")+QString::number(i), 0.0, 1.0, Qt::red ); //f stands for fingers
977 FingertipTouchSensorGraphic(PhyObject *handPiece, double alpha, double h, bool isRight, bool isThumb, QString name = "unnamed") :
1062 glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
1074 // To compute the normal we take two vectors along two adjacent sides of the quad, compute the cross
1076 const wVector v1(0.0, sin(angle) - sin(angle + m_angularIncrement), cos(angle) - cos(angle + m_angularIncrement));
1082 glVertex3f(epsilon, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
1083 glVertex3f(-m_h, adjustedRadius * sin(effectiveNextAngle), adjustedRadius * cos(effectiveNextAngle));
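// A minimal sketch (not code from this file) of the normal computation described in the
// comment at line 1074 above: two vectors along adjacent sides of the quad (one following
// the arc, one parallel to the cylinder axis) and their cross product. Vec3, cross() and
// quadNormal() are illustrative helpers, not types from this file.
#include <cmath>

struct Vec3 { double x, y, z; };

inline Vec3 cross(const Vec3& a, const Vec3& b)
{
	return Vec3{ a.y * b.z - a.z * b.y,
	             a.z * b.x - a.x * b.z,
	             a.x * b.y - a.y * b.x };
}

inline Vec3 quadNormal(double angle, double angularIncrement)
{
	// Side of the quad following the arc of the cylinder section
	const Vec3 v1{ 0.0,
	               std::sin(angle) - std::sin(angle + angularIncrement),
	               std::cos(angle) - std::cos(angle + angularIncrement) };
	// Side of the quad parallel to the cylinder axis (x in the fingertip frame)
	const Vec3 v2{ 1.0, 0.0, 0.0 };

	return cross(v1, v2);
}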
1202 PalmPatchesTouchSensorGraphic(PhyObject *handPalm, const QVector<iCubPalmPatchesTouchSensor::Triangle>& patches, bool isRight, QString name = "unnamed") :
1349 iCubFingertipsTouchSensor::iCubFingertipsTouchSensor(ConfigurationParameters& params, QString prefix) :
1369 ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
1371 m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
1377 usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
1390 params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
1393 params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
1399 Descriptor d = addTypeDescription(type, "Hand fingertips touch sensor", "The touch sensor of the iCub fingertips. There are five sensors, one for each fingertip");
1400 d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose fingertip touch sensors are read. Choose between \"right\" and \"left\"");
1401 d.describeBool("checkAllObjects").def(true).help("Whether to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
1402 d.describeReal("alpha").def(45.0).help("The aperture of the sensor surface", "The aperture angle of the sensor surface in degrees (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
1403 d.describeReal("h").def(0.01).help("The height of the sensor surface", "The height of the sensor surface (see the \"Fingertip Touch Sensor.png\" image for a graphical representation of sensor surface dimensions)");
1404 d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true, areas corresponding to the touch sensor surface are drawn on the fingertips when doing graphical simulations");
1438 void iCubFingertipsTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
1454 // Checking if we have to draw the sensors. This is here because it requires a pointer to icub parts
1458 // Creating graphical objects representing the touch sensor areas. They will set the finger piece as
1460 m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[19], m_alpha, m_h, m_isRight, false)); // Index
1461 m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[20], m_alpha, m_h, m_isRight, false)); // Middle
1462 m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[21], m_alpha, m_h, m_isRight, false)); // Ring
1463 m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[22], m_alpha, m_h, m_isRight, false)); // Little
1464 m_graphicalTouchSensors.append(new FingertipTouchSensorGraphic(m_icubArm[26], m_alpha, m_h, m_isRight, true)); // Thumb
1501 if ((obj != NULL) && (m_world->smartCheckContacts(handPiece, (PhyObject*) m_objects->at(i), maxNumContacts, &contacts))) {
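// A minimal sketch (not code from this file) of how the contact check above can be combined
// with the per-point filter goodCollisionPoint() declared further below. It reuses the member
// names appearing elsewhere in this file (m_world, m_objects); the QVector<wVector> type for
// the contact list is an assumption, the container filled by smartCheckContacts() may differ.
bool touched = false;
for (int i = 0; (i < m_objects->size()) && !touched; i++) {
	PhyObject* const obj = dynamic_cast<PhyObject*>(m_objects->at(i));

	QVector<wVector> contacts;
	if ((obj != NULL) && m_world->smartCheckContacts(handPiece, obj, maxNumContacts, &contacts)) {
		// The fingertip sensor fires only if at least one contact point lies on the sensitive area
		for (int c = 0; (c < contacts.size()) && !touched; c++) {
			touched = goodCollisionPoint(handPiece, contacts[c], isThumb);
		}
	}
}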
1527 bool iCubFingertipsTouchSensor::goodCollisionPoint(PhyObject* handPiece, const wVector& collisionPoint, bool isThumb) const
1529 // The various fingertips have frames of reference with different orientations, so the direction towards
1537 // Here we calculate the angle on the yz plane, but the 0 angle is on different axes depending on the
1551 const float distFromFingertip = (dynamic_cast<PhyCylinder*>(handPiece))->height() / 2.0 - collisionPoint.x;
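// A minimal sketch (not code from this file) of the acceptance test described in the comments
// above: a contact point (expressed in the fingertip's frame, x along the cylinder axis) counts
// as a touch only if it falls within the angular aperture of the sensitive surface and close
// enough to the tip. onSensitiveArea() is an illustrative helper; the reference axis for the
// zero angle is simplified here (in the original code it depends on the finger and on the hand
// side), and alpha is taken in radians even though the configuration parameter is in degrees.
#include <cmath>

bool onSensitiveArea(const wVector& point, double cylinderHeight, double alpha, double h)
{
	// Angle of the contact point on the yz plane, measured from an assumed reference axis
	const double angle = std::atan2(point.y, point.z);

	// Distance of the contact point from the end of the fingertip along the cylinder axis
	const double distFromFingertip = cylinderHeight / 2.0 - point.x;

	return (std::fabs(angle) <= alpha / 2.0) && (distFromFingertip >= 0.0) && (distFromFingertip <= h);
}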
1561 iCubPalmPatchesTouchSensor::iCubPalmPatchesTouchSensor(ConfigurationParameters& params, QString prefix) :
1581 ConfigurationHelper::throwUserConfigError(prefix + "hand", m_icubHand, "The hand parameter must be either \"right\" or \"left\" (case insensitive)");
1583 m_checkAllObjects = ConfigurationHelper::getBool(params, prefix + "checkAllObjects", m_checkAllObjects);
1587 usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
1600 params.createParameter(prefix, "checkAllObjects", m_checkAllObjects ? QString("true") : QString("false"));
1601 params.createParameter(prefix, "drawSensor", m_drawSensor ? QString("true") : QString("false"));
1607 Descriptor d = addTypeDescription(type, "Hand palm touch sensor", "The touch sensor of the iCub hand palm. There are four sensors, roughly in the same positions of the four patches on the real iCub hand");
1608 d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose palm touch sensors are read. Choose between \"right\" and \"left\"");
1609 d.describeBool("checkAllObjects").def(true).help("Whether to check collisions with all objects or not", "If true, the collision of the hand touch sensors with all objects in the world is checked, otherwise only those in the objects vector are taken into account. Note that if the iCub is kinematic, only collisions with objects in the objects vector are checked regardless of the value of this parameter.");
1610 d.describeBool("drawSensor").def(true).help("Whether to draw sensor areas", "If true, areas corresponding to the touch sensor surface are drawn on the palm when doing graphical simulations");
1632 m_world->smartCheckContacts(m_handPalm, (PhyObject*) m_objects->at(i), maxNumContacts, &contactsWithObj);
1645 // Now we have to check each contact point for each triangle. We also save activations into a QVector
1676 void iCubPalmPatchesTouchSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
1694 // left hand have different frames of reference: in the right hand the palm is towards +z, while
1695 // in the left hand it is towards -z. The center of the patches is not in the middle of the palm along
1700 const float triangleHeight = sqrt((triangleSide * triangleSide) - (triangleHalfSide * triangleHalfSide));
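// A minimal sketch (not code from this file) of the equilateral-patch geometry the comments
// above refer to: side, height and three vertices around a center point in the palm plane.
// Patch and makeEquilateralPatch() are illustrative; the actual plane, offsets and the sign
// flip between the right and the left hand used by the original code are omitted here.
#include <cmath>

struct Patch { wVector vertex[3]; };

Patch makeEquilateralPatch(const wVector& center, double triangleSide, bool pointsForward)
{
	const double halfSide = triangleSide / 2.0;
	const double height = std::sqrt(triangleSide * triangleSide - halfSide * halfSide); // = side * sqrt(3) / 2
	const double dir = pointsForward ? 1.0 : -1.0; // flips the triangles that point the other way

	// The centroid of an equilateral triangle lies at height/3 from the base and 2*height/3 from the apex
	Patch p;
	p.vertex[0] = wVector(center.x + dir * 2.0 * height / 3.0, center.y, center.z);      // apex
	p.vertex[1] = wVector(center.x - dir * height / 3.0, center.y + halfSide, center.z); // base corner
	p.vertex[2] = wVector(center.x - dir * height / 3.0, center.y - halfSide, center.z); // base corner
	return p;
}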
1756 bool iCubPalmPatchesTouchSensor::pointInPalmTriangle(const wVector& point, const Triangle& triangle) const
1763 // The algorithm used here relies on barycentric coordinates to check whether a point is inside a triangle.
1767 // The version implemented here is directly taken from the second link (an offline version is in the
1768 // documentation). We discard the z coordinate (and do computations in 2D) because the check on z has
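// A minimal sketch (not code from this file) of the 2D barycentric point-in-triangle test the
// comments above describe (the z coordinate is discarded, as explained there). pointInTriangle2D()
// is an illustrative helper; a, b and c are the triangle vertices and p is the contact point,
// all expressed in the palm frame.
bool pointInTriangle2D(const wVector& p, const wVector& a, const wVector& b, const wVector& c)
{
	// Edge vectors of the triangle and the vector from the first vertex to the point (x and y only)
	const double v0x = c.x - a.x, v0y = c.y - a.y;
	const double v1x = b.x - a.x, v1y = b.y - a.y;
	const double v2x = p.x - a.x, v2y = p.y - a.y;

	// Dot products needed to solve for the barycentric coordinates (u, v)
	const double dot00 = v0x * v0x + v0y * v0y;
	const double dot01 = v0x * v1x + v0y * v1y;
	const double dot02 = v0x * v2x + v0y * v2y;
	const double dot11 = v1x * v1x + v1y * v1y;
	const double dot12 = v1x * v2x + v1y * v2y;

	const double invDenom = 1.0 / (dot00 * dot11 - dot01 * dot01);
	const double u = (dot11 * dot02 - dot01 * dot12) * invDenom;
	const double v = (dot00 * dot12 - dot01 * dot02) * invDenom;

	// Inside (or on the border) when both coordinates are non-negative and their sum is at most one
	return (u >= 0.0) && (v >= 0.0) && (u + v <= 1.0);
}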
1795 HandObjectVisualOffsetSensor::HandObjectVisualOffsetSensor(ConfigurationParameters& params, QString prefix) :
1806 usableResources(QStringList() << icubResource << neuronsIteratorResource << "objects" << "world");
1824 Descriptor d = addTypeDescription(type, "Visual offset between the hand and the object", "This sensor computes the distance between the hand and the first object in the visual field of the robot. Returns the distances on the vertical and horizontal axes");
1825 d.describeEnum("hand").def("right").values(QStringList() << "right" << "left").props(IsMandatory).help("The hand to use", "The hand whose distance from the object should be returned. Choose between \"right\" and \"left\"");
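// A minimal sketch (not code from this file) of the idea behind this sensor: the hand and the
// object positions are expressed in the frame of the robot's eye and the horizontal and vertical
// angular offsets between the two are returned. visualOffset() and the eye-frame convention
// (x pointing forward) are assumptions for illustration; the projection and normalization used
// by the original sensor may differ.
#include <cmath>

void visualOffset(const wVector& handInEye, const wVector& objectInEye,
                  double& horizontalOffset, double& verticalOffset)
{
	// Azimuth (horizontal) and elevation (vertical) angles of each point in the eye frame
	const double handAz = std::atan2(handInEye.y, handInEye.x);
	const double handEl = std::atan2(handInEye.z, handInEye.x);
	const double objAz = std::atan2(objectInEye.y, objectInEye.x);
	const double objEl = std::atan2(objectInEye.z, objectInEye.x);

	horizontalOffset = objAz - handAz;
	verticalOffset = objEl - handEl;
}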
1836 // We get this here because we are not notified if the vector changes (i.e. elements are added or deleted),