sensors.cpp
45 m_neuronsIteratorResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator"))),
46 m_additionalInputsResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "additionalInputsResource", "additionalInputs"))),
79 Descriptor d = addTypeDescription(type, "Adds input neurons that can be used for custom operations", "With this sensor you can specify how many additional inputs are needed in the controller. This also declares a resource that can be used to access the additional inputs");
80 d.describeInt("additionalInputs").def(1).limits(1,100).props(IsMandatory).help("The number of additional inputs that will be added to the controller (default 1)");
81 d.describeString("neuronsIterator").def("neuronsIterator").help("The name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
82 d.describeString("additionalInputsResource").def("additionalInputs").help("The name of the resource associated with the vector of additional inputs (default is \"additionalInputs\")");
135 neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator");
146 Logger::warning( QString("ObjectPositionSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
160 Descriptor d = addTypeDescription( type, "Sensor for reading the three absolute coordinate (position into the worlf frame) of an object" );
161 d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
162 d.describeString( "object" ).def( "object" ).props( IsMandatory ).help( "The name of the resource associated with the object to track with this sensor" );
163 d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used for linearize the object position into [0,1]" );
164 d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used for linearize the object position into [0,1]" );
192 void ObjectPositionSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
218 params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
219 params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
261 LinearCameraGraphic(WObject *object, const wMatrix& transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name = "unamed") :
385 const wVector lineEnd = wVector(cos(curAngle), sin(curAngle), 0.0).scale(linearCameraReceptorsLength);
490 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor) :
495 m_aperture((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture)),
552 // A helper structure storing information about colors in a single receptor. minAngle and maxAngle
553 // are used to store the current portion of the receptor for which we already know the color, while
554 // colorsAndFractions is the list of colors and the portion of the receptor occupied by that color
605 // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
618 objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, rangesAndColors, distance, m_maxDistance);
620 // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
626 // To safely compare with the aperture, we have to convert angles between -PI_GRECO and PI_GRECO
638 colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, m_apertureMax, distance));
641 colorsRangesAndDistances.append(ColorRangeAndDistance(color, m_apertureMin, maxAngle, distance));
644 if (((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) || ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax))) {
645 colorsRangesAndDistances.append(ColorRangeAndDistance(color, max(minAngle, m_apertureMin), min(maxAngle, m_apertureMax), distance));
654 // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
656 colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, m_apertureMin, m_apertureMax, std::numeric_limits<double>::infinity()));
658 // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
661 for (QList<ColorRangeAndDistance>::const_iterator it = colorsRangesAndDistances.begin(); it != colorsRangesAndDistances.end(); ++it) {
664 const int maxIndex = min(double(m_numReceptors - 1), floor((it->maxAngle - m_apertureMin) / m_receptorRange));
680 colorsInReceptors[i].colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(it->color, fraction));
684 // The final step is to compute the resulting color for each receptor. See class description for a comment
690 for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorsInReceptors[i].colorsAndFractions.begin(); it != colorsInReceptors[i].colorsAndFractions.end(); ++it) {
695 m_receptors[i] = QColor::fromRgbF(min(1.0f, max(0.0f, red)), min(1.0f, max(0.0f, green)), min(1.0f, max(0.0f, blue)));
712 m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_apertureMin, m_apertureMax, m_numReceptors, "linearCamera");
773 LinearCameraGraphic(WObject *object, const wMatrix& transformation, QVector<SimpleInterval> receptorsRanges, QString name = "unamed") :
893 const wVector line1End = wVector(cos(m_receptorsRanges[i].start), sin(m_receptorsRanges[i].start), 0.0).scale(linearCameraReceptorsLength);
894 const wVector line2End = wVector(cos(m_receptorsRanges[i].end), sin(m_receptorsRanges[i].end), 0.0).scale(linearCameraReceptorsLength);
988 QVector<SimpleInterval> receptorsFromApertureAndNumReceptors(double aperture, unsigned int numReceptors)
992 aperture = ((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture));
998 r.append(SimpleInterval(apertureMin + i * receptorRange, apertureMin + (i + 1) * receptorRange));
1005 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor) :
1023 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, QVector<SimpleInterval> receptorsRanges, double maxDistance, QColor backgroundColor) :
1081 // A helper structure storing information about colors in a single receptor. curInterval is
1135 // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
1148 objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, rangesAndColors, distance, m_maxDistance);
1150 // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
1155 for (QVector<PhyObject2DWrapper::AngularRangeAndColor>::const_iterator it = rangesAndColors.constBegin(); it != rangesAndColors.end(); ++it) {
1156 // To safely compare with the aperture, we have to convert angles between -PI_GRECO and PI_GRECO
1176 // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
1178 colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, -PI_GRECO, PI_GRECO, std::numeric_limits<double>::infinity()));
1180 // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
1181 // in each receptor. Before doing it we initialize the colorsInReceptors list so that the current
1194 colorIt->curInterval.unite(SimpleInterval(receptorMinAngle, PI_GRECO)).unite(SimpleInterval(-PI_GRECO, receptorMaxAngle));
1200 for (QList<ColorRangeAndDistance>::const_iterator colRangeIt = colorsRangesAndDistances.begin(); colRangeIt != colorsRangesAndDistances.end(); ++colRangeIt) {
1206 #warning CHECK HOW OFTEN curLength DIFFERS FROM newLength EVEN THOUGH THEY SHOULD BE EQUAL (DUE TO NUMERICAL ERRORS)
1210 colorIt->colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(colRangeIt->color, fraction));
1215 // The final step is to compute the resulting color for each receptor. See class description for a comment
1223 for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorIt2->colorsAndFractions.begin(); it != colorIt2->colorsAndFractions.end(); ++it) {
1228 *recpActIt = QColor::fromRgbF(min(1.0f, max(0.0f, red)), min(1.0f, max(0.0f, green)), min(1.0f, max(0.0f, blue)));
1245 m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_receptorsRanges, "linearCamera");
1287 throw SampleFileLoadingException(m_filename.toLatin1().data(), "Cannot open file for reading");
1296 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Wrong format for the first line, expected 5 elements, got " + QString::number(confs.size())).toLatin1().data());
1303 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the first element of the first row: expected an unsigned integer, got \"" + confs[0] + "\"").toLatin1().data());
1307 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the second element of the first row: expected an unsigned integer, got \"" + confs[1] + "\"").toLatin1().data());
1311 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the third element of the first row: expected an unsigned integer, got \"" + confs[2] + "\"").toLatin1().data());
1315 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the fourth element of the first row: expected a real number, got \"" + confs[3] + "\"").toLatin1().data());
1319 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the fifth element of the first row: expected a real number, got \"" + confs[4] + "\"").toLatin1().data());
1327 // Now reading the blocks. I use the id after "TURN" for a safety check, the original evorobot code used that
1336 if ((turnLineSplitted.size() != 2) || (turnLineSplitted[0] != "TURN") || (turnLineSplitted[1].toUInt() != dist)) {
1337 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid TURN line: \"" + turnLine + "\"").toLatin1().data());
1347 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid activations line (wrong number of elements, expected " + QString::number(m_numIR) + ", got " + QString::number(activationsLineSplitted.size()) + "): \"" + activationsLine + "\"").toLatin1().data());
1354 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid activations line (invalid activation value): \"" + activationsLineSplitted[id] + "\"").toLatin1().data());
1364 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("The last line in the file should be \"END\", actual value: \"" + finalLine + "\"").toLatin1().data());
1381 QVector<unsigned int>::const_iterator SampledIRDataLoader::getActivation(real dist, real ang) const