sensors.cpp
44 m_neuronsIteratorResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator"))),
45 m_additionalInputsResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "additionalInputsResource", "additionalInputs"))),
78 Descriptor d = addTypeDescription(type, "Adds input neurons that can be used for custom operations", "With this sensor you can specify how many additional inputs are needed in the controller. This also declares a resource that can be used to access the additional inputs");
79 d.describeInt("additionalInputs").def(1).limits(1,100).props(IsMandatory).help("The number of additional inputs that will be added to the controller (default 1)");
80 d.describeString("neuronsIterator").def("neuronsIterator").help("The name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
81 d.describeString("additionalInputsResource").def("additionalInputs").help("The name of the resource associated with the vector of additional inputs (default is \"additionalInputs\")");
134 neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator");
145 Logger::warning( QString("ObjectPositionSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
159 Descriptor d = addTypeDescription( type, "Sensor for reading the three absolute coordinate (position into the worlf frame) of an object" );
160 d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
161 d.describeString( "object" ).def( "object" ).props( IsMandatory ).help( "The name of the resource associated with the object to track with this sensor" );
162 d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used for linearize the object position into [0,1]" );
163 d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used for linearize the object position into [0,1]" );
191 void ObjectPositionSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
217 params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
218 params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
259 LinearCameraGraphic(WObject *object, const wMatrix& transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name = "unamed") :
383 const wVector lineEnd = wVector(cos(curAngle), sin(curAngle), 0.0).scale(linearCameraReceptorsLength);
488 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, QColor backgroundColor) :
493 m_aperture((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture)),
548 // A helper class to ease computations with multiple intervals. This class starts with a single
591 } else if ((start >= it->start) && (start < it->end) && (end > it->start) && (end <= it->end)) {
633 // Intervals will always be ordered from the one with the lowest start to the one with the highest start.
638 // A helper structure memorizing information about colors in a single receptor. minAngle and maxAngle
639 // are used to store the current portion of the receptor for which we already know the color, while
640 // colorsAndFractions is the list of colors and the portion of the receptor occupied by that color
668 #warning AS SOON AS ROBOTS ARE IN THE OBJECTS LIST, REMEMBER TO EXCLUDE THE OBJECT THE CAMERA IS ATTACHED TO WHEN COMPUTING THE ACTIVATION
694 // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
703 // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
715 colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, m_apertureMax, distance));
718 colorsRangesAndDistances.append(ColorRangeAndDistance(color, m_apertureMin, maxAngle, distance));
721 if (((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) || ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax))) {
730 // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
732 colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, m_apertureMin, m_apertureMax, std::numeric_limits<double>::infinity()));
734 // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
737 for (QList<ColorRangeAndDistance>::const_iterator it = colorsRangesAndDistances.begin(); it != colorsRangesAndDistances.end(); ++it) {
740 const int maxIndex = min(m_numReceptors - 1, floor((it->maxAngle - m_apertureMin) / m_receptorRange));
751 const double fraction = min(1.0, colorsInReceptors[i].curInterval.removeInterval(it->minAngle, it->maxAngle));
752 colorsInReceptors[i].colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(it->color, fraction));
756 // The final step is to compute the resulting color for each receptor. See class description for a comment
762 for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorsInReceptors[i].colorsAndFractions.begin(); it != colorsInReceptors[i].colorsAndFractions.end(); ++it) {
784 m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_apertureMin, m_apertureMax, m_numReceptors, "linearCamera");
834 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Wrong format for the first line, expected 5 elements, got " + QString::number(confs.size())).toAscii().data());
841 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the first element of the first row: expected an unsigned integer, got \"" + confs[0] + "\"").toAscii().data());
845 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the second element of the first row: expected an unsigned integer, got \"" + confs[1] + "\"").toAscii().data());
849 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the third element of the first row: expected an unsigned integer, got \"" + confs[2] + "\"").toAscii().data());
853 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the fourth element of the first row: expected a real number, got \"" + confs[3] + "\"").toAscii().data());
857 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Error reading the fifth element of the first row: expected a real number, got \"" + confs[4] + "\"").toAscii().data());
865 // Now reading the blocks. I use the id after "TURN" for a safety check, the original evorobot code used that
874 if ((turnLineSplitted.size() != 2) || (turnLineSplitted[0] != "TURN") || (turnLineSplitted[1].toUInt() != dist)) {
875 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid TURN line: \"" + turnLine + "\"").toAscii().data());
885 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid activations line (wrong number of elements, expected " + QString::number(m_numIR) + ", got " + QString::number(activationsLineSplitted.size()) + "): \"" + activationsLine + "\"").toAscii().data());
892 throw SampleFileLoadingException(m_filename.toAscii().data(), ("Invalid activations line (invalid activation value): \"" + activationsLineSplitted[id] + "\"").toAscii().data());
902 throw SampleFileLoadingException(m_filename.toAscii().data(), ("The last line in the file should be \"END\", actual value: \"" + finalLine + "\"").toAscii().data());
919 QVector<unsigned int>::const_iterator SampledIRDataLoader::getActivation(real dist, real ang) const