wheeledexperimenthelper.cpp
1 /********************************************************************************
2  * FARSA Experiments Library *
3  * Copyright (C) 2007-2012 *
4  * Gianluca Massera <emmegian@yahoo.it> *
5  * Stefano Nolfi <stefano.nolfi@istc.cnr.it> *
6  * Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it> *
7  * Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it> *
8  * *
9  * This program is free software; you can redistribute it and/or modify *
10  * it under the terms of the GNU General Public License as published by *
11  * the Free Software Foundation; either version 2 of the License, or *
12  * (at your option) any later version. *
13  * *
14  * This program is distributed in the hope that it will be useful, *
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
17  * GNU General Public License for more details. *
18  * *
19  * You should have received a copy of the GNU General Public License *
20  * along with this program; if not, write to the Free Software *
21  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *
22  ********************************************************************************/
23 
24 #include "wheeledexperimenthelper.h"
25 #include "logger.h"
26 #include "phybox.h"
27 #include "phycylinder.h"
28 #include "robots.h"
29 #include "arena.h"
30 #include "intervals.h"
31 
32 namespace farsa {
33 
34 // This anonymous namespace contains helper functions used in this file
35 namespace {
48  wVector computeWallVertex(PhyBox* wall, unsigned int vertexID)
49  {
50  const wVector centerOnPlane = wall->matrix().w_pos - wall->matrix().z_ax.scale(wall->sideZ() / 2.0);
51  const wVector halfSide1Vector = wall->matrix().x_ax.scale(wall->sideX() / 2.0);
52  const wVector halfSide2Vector = wall->matrix().y_ax.scale(wall->sideY() / 2.0);
53 
54  wVector vertex;
55  switch(vertexID % 4) {
56  case 0:
57  vertex = centerOnPlane + halfSide1Vector + halfSide2Vector;
58  break;
59  case 1:
60  vertex = centerOnPlane + halfSide1Vector - halfSide2Vector;
61  break;
62  case 2:
63  vertex = centerOnPlane - halfSide1Vector + halfSide2Vector;
64  break;
65  case 3:
66  vertex = centerOnPlane - halfSide1Vector - halfSide2Vector;
67  break;
68  default:
69  break;
70  }
71 
72  return vertex;
73  }
74 
75 
85  double getAngleWithXAxis(const wMatrix& mtr, const wVector& v)
86  {
87  FARSA_DEBUG_TEST_INVALID(mtr.x_ax.x) FARSA_DEBUG_TEST_INVALID(mtr.x_ax.y) FARSA_DEBUG_TEST_INVALID(mtr.x_ax.z)
88  FARSA_DEBUG_TEST_INVALID(v.x) FARSA_DEBUG_TEST_INVALID(v.y) FARSA_DEBUG_TEST_INVALID(v.z)
89 
90  // Normalizing v
91  const wVector vdir = v.scale(1.0 / v.norm()); FARSA_DEBUG_TEST_INVALID(vdir.x) FARSA_DEBUG_TEST_INVALID(vdir.y) FARSA_DEBUG_TEST_INVALID(vdir.z)
92 
93  // To get the angle (unsigned), computing the acos of the dot product of the two vectors. We have to
94  // constrain the cross product between -1 and 1 because sometimes it can have values outside the range
95  const double crossProduct = min(1.0, max(-1.0, mtr.x_ax % vdir)); FARSA_DEBUG_TEST_INVALID(crossProduct)
96  const double unsignedAngle = acos(crossProduct); FARSA_DEBUG_TEST_INVALID(unsignedAngle)
97 
98  // Now choosing the right sign. To do this we first compute the cross product of the x axis and
99  // the vector direction, then we see if it has the same direction of Z or not
100  const double s = mtr.z_ax % (mtr.x_ax * vdir); FARSA_DEBUG_TEST_INVALID(s)
101 
102  return (s < 0.0) ? -unsignedAngle : unsignedAngle;
103  }
104 
	/// Computes the portion of the linear camera view field occupied by the base
	/// circle of a cylinder, split into the angular sub-ranges covered by the
	/// different cylinder colors.
	///
	/// cameraMtr is the camera frame of reference (its x axis is the direction at
	/// angle 0, its z axis the up-vector); objMtr is the cylinder frame of
	/// reference (the base circle lies in its y-z plane); radius is the circle
	/// radius; [segmentsColorBegin, segmentsColorEnd) is the sequence of colored
	/// angular segments of the cylinder; rangesAndColors (output) receives the
	/// visible angular ranges in camera coordinates with their colors; distance
	/// (output) is set to the camera-to-border distance, or to -1.0 if the object
	/// is not visible; objects farther than maxDistance are considered not
	/// visible.
	template <class SegmentColorIt>
	void computeLinearViewFieldOccupiedRangeForCircle(const wMatrix& cameraMtr, const wMatrix& objMtr, double radius, const SegmentColorIt segmentsColorBegin, const SegmentColorIt segmentsColorEnd, QVector<PhyObject2DWrapper::AngularRangeAndColor>& rangesAndColors, double& distance, double maxDistance)
	{
		// Useful constant (tolerance for near-zero/near-parallel tests)
		const real epsilon = 0.0001f;

		// We have to translate the camera to lie on the same plane of the cylinder base. We translate it along
		// its local upvector (Z axis) until it reaches the plane containing the base of the cylinder. Of course
		// this only works if the camera Z axis is not paraller to the plane with the base of the cylinder. In
		// that case all computations would be invalid, so we don't do anything
		wMatrix mtr = cameraMtr;
		if (fabs(mtr.z_ax % objMtr.x_ax) < epsilon) {
			distance = -1.0;
			return;
		}
		// NOTE(review): the division by mtr.z_ax.z assumes the base plane of the
		// object is horizontal (at height objMtr.w_pos.z) — verify for tilted objects
		mtr.w_pos = mtr.w_pos + mtr.z_ax.scale((objMtr.w_pos.z - mtr.w_pos.z) / mtr.z_ax.z); FARSA_DEBUG_TEST_INVALID(mtr.w_pos.x) FARSA_DEBUG_TEST_INVALID(mtr.w_pos.y) FARSA_DEBUG_TEST_INVALID(mtr.w_pos.z) FARSA_DEBUG_TEST_INVALID(mtr.z_ax.x) FARSA_DEBUG_TEST_INVALID(mtr.z_ax.y) FARSA_DEBUG_TEST_INVALID(mtr.z_ax.z) FARSA_DEBUG_TEST_INVALID(objMtr.w_pos.x) FARSA_DEBUG_TEST_INVALID(objMtr.w_pos.y) FARSA_DEBUG_TEST_INVALID(objMtr.w_pos.z)

		// First of all we have to calculate the distance between the center of the camera and the center of the
		// cylinder. Here we also check that the camera is not inside the cylinder (distance < 0.0 below). The
		// vector from the camera to the center of the cylinder is computed in the object frame of reference
		// because we need it in this frame of reference later
		const wVector centerDir = objMtr.unrotateVector(mtr.w_pos - objMtr.w_pos);
		const double centerDistance = centerDir.norm();
		distance = centerDistance - radius;
		if ((distance < 0.0) || (distance > maxDistance)) {
			distance = -1.0;
			return;
		}

		// We also need to compute the angles of the two tangent points to compute the ranges for
		// various colors. The angles are relative to the cylinder center. The first thing we need
		// is the angle between the vector from the camera to center of the cylinder and the radius
		// perpendicular to the tangent
		const double halfSectorAngle = acos(radius / centerDistance); FARSA_DEBUG_TEST_INVALID(halfSectorAngle)
		// Now computing the starting and ending angle in the cylinder frame of reference. We need
		// the angle of the centerDir vector, then we only have to subtract halfSectorAngle to get
		// starting angle. The end angle is easy to obtain, then
		const real startAngleInCylinder = normalizeRad(atan2(centerDir.z, centerDir.y) - halfSectorAngle);
		const real endAngleInCylinder = normalizeRad(startAngleInCylinder + 2.0 * halfSectorAngle);

		// Clearing the vector that will contain segments and colors. It will first contain ranges in the
		// cylinder frame of reference, and then the angles will be converted to linear camera ranges
		rangesAndColors.clear();
		if ((segmentsColorBegin + 1) == segmentsColorEnd) {
			// There is only one color, we can simply add one element to the rangesAndColors vector
			rangesAndColors.append(PhyObject2DWrapper::AngularRangeAndColor(startAngleInCylinder, endAngleInCylinder, segmentsColorBegin->color));
		} else {
			// The interval modelling the visible arc, with which all other arcs are intersected. If
			// the arc crosses -pi/pi, the interval would go from -inf to end and from start to inf,
			// so we restrict it to -pi, pi
			const Intervals visibleArc = Intervals(SimpleInterval(startAngleInCylinder, endAngleInCylinder)) & SimpleInterval(-PI_GRECO, PI_GRECO);

			// This could perhaps be made a bit more efficient by checking if the we already analyzed
			// all segments in the range, but it would be necessary only if we had objects with a lot
			// of segments
			for (SegmentColorIt it = segmentsColorBegin; it != segmentsColorEnd; ++it) {
				// Computing the intersection between the current segment and the visible range and adding it. We don't
				// need to intersect the cur segment with [-pi, pi] as we did for visibleArc because the intersection
				// with the latter will remove all values below -pi or above pi
				const Intervals curVisibleSegmentArc = visibleArc & it->intervals;
				if (!curVisibleSegmentArc.isEmpty()) {
					for (Intervals::const_iterator intrvIt = curVisibleSegmentArc.begin(); intrvIt != curVisibleSegmentArc.end(); ++intrvIt) {
						rangesAndColors.append(PhyObject2DWrapper::AngularRangeAndColor(intrvIt->start, intrvIt->end, it->color));
					}
				}
			}
		}

		// Now converting angles to linear camera ranges
		for (int i = 0; i < rangesAndColors.size(); i++) {
			// Getting the points corresponding to the angles in the current range in the frame of reference
			// of the cylinder (the base circle lies in the y-z plane, hence x = 0)
			const wVector startPointCylinder(0.0, radius * cos(rangesAndColors[i].minAngle), radius * sin(rangesAndColors[i].minAngle)); FARSA_DEBUG_TEST_INVALID(startPointCylinder.x) FARSA_DEBUG_TEST_INVALID(startPointCylinder.y) FARSA_DEBUG_TEST_INVALID(startPointCylinder.z)
			const wVector endPointCylinder(0.0, radius * cos(rangesAndColors[i].maxAngle), radius * sin(rangesAndColors[i].maxAngle)); FARSA_DEBUG_TEST_INVALID(endPointCylinder.x) FARSA_DEBUG_TEST_INVALID(endPointCylinder.y) FARSA_DEBUG_TEST_INVALID(endPointCylinder.z)

			// Now computing the points in the global frame of reference
			const wVector startPoint = objMtr.transformVector(startPointCylinder); FARSA_DEBUG_TEST_INVALID(startPoint.x) FARSA_DEBUG_TEST_INVALID(startPoint.y) FARSA_DEBUG_TEST_INVALID(startPoint.z)
			const wVector endPoint = objMtr.transformVector(endPointCylinder); FARSA_DEBUG_TEST_INVALID(endPoint.x) FARSA_DEBUG_TEST_INVALID(endPoint.y) FARSA_DEBUG_TEST_INVALID(endPoint.z)

			// Now computing the angles in the linear camera. As we don't know which is the start angle and which
			// the end angle, we rely on the fact that for cylinders the camera cannot see a portion greater than
			// PI_GRECO. We also check that the vectors to start and end point are not zero; if they are the angle is
			// computed with respect to the center of the other cylinder (this avoids the division by zero inside
			// getAngleWithXAxis when the camera lies exactly on the tangent point)
			const wVector firstAngleVector = startPoint - mtr.w_pos;
			const double firstAngleCamera = (fabs(firstAngleVector.norm()) < epsilon) ? getAngleWithXAxis(mtr, objMtr.w_pos - mtr.w_pos) : getAngleWithXAxis(mtr, firstAngleVector); FARSA_DEBUG_TEST_INVALID(firstAngleCamera)
			const wVector secondAngleVector = endPoint - mtr.w_pos;
			const double secondAngleCamera = (fabs(secondAngleVector.norm()) < epsilon) ? getAngleWithXAxis(mtr, objMtr.w_pos - mtr.w_pos) : getAngleWithXAxis(mtr, secondAngleVector); FARSA_DEBUG_TEST_INVALID(secondAngleCamera)
			if (firstAngleCamera > secondAngleCamera) {
				// Checking if they are on different sides of the -PI_GRECO/PI_GRECO boundary or on the same side. Here we exploit
				// the fact that the camera cannot see more than PI_GRECO of the cylinder
				if ((firstAngleCamera > (PI_GRECO / 2.0)) && (secondAngleCamera < (-PI_GRECO / 2.0))) {
					rangesAndColors[i].minAngle = firstAngleCamera;
					rangesAndColors[i].maxAngle = secondAngleCamera;
				} else {
					rangesAndColors[i].minAngle = secondAngleCamera;
					rangesAndColors[i].maxAngle = firstAngleCamera;
				}
			} else {
				// Checking if they are on different sides of the -PI_GRECO/PI_GRECO boundary or on the same side. Here we exploit
				// the fact that the camera cannot see more than PI_GRECO of the cylinder
				if ((firstAngleCamera < (-PI_GRECO / 2.0)) && (secondAngleCamera > (PI_GRECO / 2.0))) {
					rangesAndColors[i].minAngle = secondAngleCamera;
					rangesAndColors[i].maxAngle = firstAngleCamera;
				} else {
					rangesAndColors[i].minAngle = firstAngleCamera;
					rangesAndColors[i].maxAngle = secondAngleCamera;
				}
			}
		}
	}
256 // Old implementation of the function above, kept for a while for reference
257 // void computeLinearViewFieldOccupiedRangeForCircle(const wMatrix& cameraMtr, const wMatrix& objMtr, double radius, const QList<PhyCylinder::SegmentColor>& segmentsColor, QVector<PhyObject2DWrapper::AngularRangeAndColor>& rangesAndColors, double& distance, double maxDistance)
258 // {
259 // // We have to translate the camera to lie on the same plane of the cylinder base. We translate it along
260 // // its local upvector (Z axis) until it reaches the plane containing the base of the cylinder. Of course
261 // // this only works if the camera Z axis is not paraller to the plane with the base of the cylinder. In
262 // // that case all computations would be invalid, so we don't do anything
263 // wMatrix mtr = cameraMtr;
264 // if (fabs(mtr.z_ax % objMtr.x_ax) < 0.0001) {
265 // distance = -1.0;
266 // return;
267 // }
268 // mtr.w_pos = mtr.w_pos + mtr.z_ax.scale((objMtr.w_pos.z - mtr.w_pos.z) / mtr.z_ax.z);
269 //
270 // // First of all we have to calculate the distance between the center of the camera and the center of the
271 // // cylinder. Here we also check that the camera is not inside the cylinder. The vector from the camera
272 // // to the center of the cylinder is computed in the object frame of reference because we need it in this
273 // // frame of reference later
274 // const wVector centerDir = objMtr.unrotateVector(mtr.w_pos - objMtr.w_pos);
275 // const double centerDistance = centerDir.norm();
276 // distance = centerDistance - radius;
277 // if ((distance < 0.0) || (distance > maxDistance)) {
278 // distance = -1.0;
279 // return;
280 // }
281 //
282 // // We also need to compute the angles of the two tangent points to compute the ranges for
283 // // various colors. The angles are relative to the cylinder center. The first thing we need
284 // // is the angle between the vector from the camera to center of the cylinder and the radius
285 // // perpendicular to the tangent
286 // const double halfSectorAngle = acos(radius / centerDistance);
287 // // Now computing the starting and ending angle in the cylinder frame of reference. We need
288 // // the angle of the centerDir vector, then we only have to subtract halfSectorAngle to get
289 // // starting angle. The end angle is easy to obtain, then
290 // const real startAngleInCylinder = normalizeRad(atan2(centerDir.z, centerDir.y) - halfSectorAngle);
291 // const real endAngleInCylinder = normalizeRad(startAngleInCylinder + 2.0 * halfSectorAngle);
292 //
293 // // Clearing the vector that will contain segments and colors. It will first contain ranges in the
294 // // cylinder frame of reference, and then the angles will be converted to linear camera ranges
295 // rangesAndColors.clear();
296 // if (segmentsColor.size() == 1) {
297 // // There is only one color, we can simply add one element to the rangesAndColors vector
298 // rangesAndColors.append(PhyObject2DWrapper::AngularRangeAndColor(startAngleInCylinder, endAngleInCylinder, segmentsColor[0].color));
299 // } else {
300 // // The interval modelling the visible arc, with which all other arcs are intersected. If
301 // // the arc crosses -pi/pi, the interval would go from -inf to end and from start to inf,
302 // // so we restrict it to -pi, pi
303 // const Intervals visibleArc = Intervals(SimpleInterval(startAngleInCylinder, endAngleInCylinder)) & SimpleInterval(-PI_GRECO, PI_GRECO);
304 //
305 // // This could perhaps be made a bit more efficient by checking if the we already analyzed
306 // // all segments in the range, but it would be necessary only if we had objects with a lot
307 // // of segments
308 // for (int i = 0; i < segmentsColor.size(); i++) {
309 // // Computing the intersection between the current segment and the visible range and adding it. We don't
310 // // need to intersect the cur segment with [-pi, pi] as we did for visibleArc because the intersection
311 // // with the latter will remove all values below -pi or above pi
312 // const Intervals curVisibleSegmentArc = visibleArc & segmentsColor[i].intervals;
313 // if (!curVisibleSegmentArc.isEmpty()) {
314 // for (Intervals::const_iterator it = curVisibleSegmentArc.begin(); it != curVisibleSegmentArc.end(); ++it) {
315 // rangesAndColors.append(PhyObject2DWrapper::AngularRangeAndColor(it->start, it->end, segmentsColor[i].color));
316 // }
317 // }
318 // }
319 // }
320 //
321 // // Now converting angles to linear camera ranges
322 // for (int i = 0; i < rangesAndColors.size(); i++) {
323 // // Getting the points corresponding to the angles in the current range in the frame of reference
324 // // of the cylinder
325 // const wVector startPointCylinder(0.0, radius * cos(rangesAndColors[i].minAngle), radius * sin(rangesAndColors[i].minAngle));
326 // const wVector endPointCylinder(0.0, radius * cos(rangesAndColors[i].maxAngle), radius * sin(rangesAndColors[i].maxAngle));
327 //
328 // // Now computing the points in the global frame of reference
329 // const wVector startPoint = objMtr.transformVector(startPointCylinder);
330 // const wVector endPoint = objMtr.transformVector(endPointCylinder);
331 //
332 // // Now computing the angles in the linear camera. As we don't know which is the start angle and which the end angle, we
333 // // rely on the fact that for cylinders the camera cannot see a portion greater than PI_GRECO
334 // const double firstAngleCamera = getAngleWithXAxis(mtr, startPoint - mtr.w_pos);
335 // const double secondAngleCamera = getAngleWithXAxis(mtr, endPoint - mtr.w_pos);
336 // if (firstAngleCamera > secondAngleCamera) {
337 // // Checking if they are on different sides of the -PI_GRECO/PI_GRECO boundary or on the same side. Here we exploit
338 // // the fact that the camera cannot see more than PI_GRECO of the cylinder
339 // if ((firstAngleCamera > (PI_GRECO / 2.0)) && (secondAngleCamera < (-PI_GRECO / 2.0))) {
340 // rangesAndColors[i].minAngle = firstAngleCamera;
341 // rangesAndColors[i].maxAngle = secondAngleCamera;
342 // } else {
343 // rangesAndColors[i].minAngle = secondAngleCamera;
344 // rangesAndColors[i].maxAngle = firstAngleCamera;
345 // }
346 // } else {
347 // // Checking if they are on different sides of the -PI_GRECO/PI_GRECO boundary or on the same side. Here we exploit
348 // // the fact that the camera cannot see more than PI_GRECO of the cylinder
349 // if ((firstAngleCamera < (-PI_GRECO / 2.0)) && (secondAngleCamera > (PI_GRECO / 2.0))) {
350 // rangesAndColors[i].minAngle = secondAngleCamera;
351 // rangesAndColors[i].maxAngle = firstAngleCamera;
352 // } else {
353 // rangesAndColors[i].minAngle = firstAngleCamera;
354 // rangesAndColors[i].maxAngle = secondAngleCamera;
355 // }
356 // }
357 // }
358 // }
359 
378  void computeDistanceAndOrientationFromRobotToCylinder(wVector robotPosition, double robotOrientation, double robotRadius, double radius, wVector position, double& distance, double& angle)
379  {
380  // Setting to 0.0 the z coordinate of positions
381  robotPosition.z = 0.0;
382  position.z = 0.0;
383 
384  // Computing the distance. We have to remove both the robot radius and the object radius
385  distance = (position - robotPosition).norm() - robotRadius - radius;
386 
387  // Now computing the angle between the robot and the object
388  angle = atan2(position.y - robotPosition.y, position.x - robotPosition.x) - robotOrientation;
389  }
390 }
391 
393  m_arena(arena),
394  m_previousMatrix()
395 {
396  // Nothing to do here
397 }
398 
400 {
401  // Nothing to do here
402 }
403 
405 {
406  return phyObject();
407 }
408 
410 {
411  return phyObject();
412 }
413 
415 {
416  if (phyObject() != NULL) {
417  phyObject()->setStatic(s);
418  }
419 }
420 
422 {
423  if (phyObject() != NULL) {
424  return phyObject()->getStatic();
425  } else {
426  return true;
427  }
428 }
429 
431 {
432  if (phyObject() != NULL) {
433  phyObject()->setKinematic(b, c);
434  }
435 }
436 
438 {
439  if (phyObject() != NULL) {
440  return phyObject()->getKinematic();
441  } else {
442  return true;
443  }
444 }
445 
447 {
448  setPosition(pos.x, pos.y);
449 }
450 
452 {
453  return wObject()->matrix().w_pos;
454 }
455 
456 void PhyObject2DWrapper::setTexture(QString textureName)
457 {
458  wObject()->setTexture(textureName);
459 }
460 
462 {
463  return wObject()->texture();
464 }
465 
467 {
468  wObject()->setColor(color);
469 }
470 
472 {
473  return wObject()->color();
474 }
475 
477 {
479 }
480 
482 {
483  return wObject()->useColorTextureOfOwner();
484 }
485 
487  PhyObject2DWrapper(arena),
488  m_box(box),
489  m_vertexes(QVector<wVector>() << computeWallVertex(m_box, 0) << computeWallVertex(m_box, 1) << computeWallVertex(m_box, 2) << computeWallVertex(m_box, 3)),
490  m_centerOnPlane(m_box->matrix().w_pos - m_box->matrix().z_ax.scale(m_box->sideZ() / 2.0)),
491  m_type(((type != Plane) && (type != Wall) && (type != RectangularTargetArea)) ? Box : type)
492 {
493  if (m_type == RectangularTargetArea) {
494  m_box->setKinematic(true, false);
495  } else if (m_type != Box) {
496  m_box->setStatic(true);
497  }
498 }
499 
501 {
502  // Nothing to do here
503 }
504 
506 {
507  return m_box;
508 }
509 
511 {
512  return m_box;
513 }
514 
516 {
517  return m_type;
518 }
519 
521 {
522  // Only Boxes can be made non-static
523  if (m_type != Box) {
524  return;
525  }
526 
527  phyObject()->setStatic(s);
528 }
529 
530 void Box2DWrapper::setKinematic(bool b, bool c)
531 {
532  if (m_type != RectangularTargetArea) {
534  }
535 }
536 
538 {
539  // Planes and Walls cannot be moved
540  if ((m_type == Plane) || (m_type == Wall)) {
541  return;
542  }
543 
544  wVector pos = phyObject()->matrix().w_pos;
545 
546  pos.x = x;
547  pos.y = y;
548 
549  phyObject()->setPosition(pos);
550 
551  // We also have to recompute the vertexes and center
552  for (unsigned int i = 0; i < 4; i++) {
553  m_vertexes[i] = computeWallVertex(m_box, i);
554  }
555  m_centerOnPlane = m_box->matrix().w_pos - m_box->matrix().z_ax.scale(m_box->sideZ() / 2.0);
556 }
557 
/// Computes the angular range of the linear camera occupied by this box/wall.
/// Boxes currently have a single color, so rangesAndColors receives at most one
/// element. distance is set to -1.0 when the object is not visible (Planes and
/// RectangularTargetAreas are never visible, as is anything beyond maxDistance
/// or when the camera up-vector is parallel to the wall base plane)
void Box2DWrapper::computeLinearViewFieldOccupiedRange(const wMatrix& cameraMtr, QVector<AngularRangeAndColor>& rangesAndColors, double& distance, double maxDistance) const
{
	// Here for the moment we only have one color, so we can compute one range

	// If this is a Plane or a RectangularTargetArea, we simply return a negative distance (they are not visible with
	// a linear camera)
	if ((m_type == Plane) || (m_type == RectangularTargetArea)) {
		distance = -1.0;
		return;
	}

	// We have to translate the camera to lie on the same plane of the vertex. We translate it along
	// its local upvector (Z axis) until it reaches the plane containing the base of the wall. Of course
	// this only works if the camera Z axis is not paraller to the plane with the base of the wall. In
	// that case all computations would be invalid, so we don't do anything
	wMatrix mtr = cameraMtr;
	if (fabs(mtr.z_ax % m_box->matrix().z_ax) < 0.0001) {
		distance = -1.0;
		return;
	}
	// NOTE(review): the division by mtr.z_ax.z assumes the wall base plane is
	// horizontal (at height m_centerOnPlane.z) — verify for tilted walls
	mtr.w_pos = mtr.w_pos + mtr.z_ax.scale((m_centerOnPlane.z - mtr.w_pos.z) / mtr.z_ax.z);

	// First of all computing the angle for every vertex
	QVector<double> angles(4);

	for (int i = 0; i < 4; i++) {
		// Computing the vector giving the direction to the vertex
		const wVector vdir = m_vertexes[i] - mtr.w_pos;

		// Now computing the angle
		angles[i] = getAngleWithXAxis(mtr, vdir);
	}

	// Now finding the min and max angle (their indexes). We have to take into account the fact that the
	// angle with the minimum value could be the upper limit and viceversa because the object could be
	// behind the camera. However we know that, as the camera is outside the wall, the maximum possible
	// angular sector of the view filed occupied by the wall is 180°. This means that also the angular
	// distance of one vertex with the center of the wall must be less than 180°. So, if we compute this
	// distance and get a value greater than 180°, we have to take (360° - computed_angular_distance)
	// and invert min with max.
	const wVector centerDir = m_centerOnPlane - mtr.w_pos;
	const double centerAngle = getAngleWithXAxis(mtr, centerDir);
	int minAngleID = 0;
	int maxAngleID = 0;

	// These two are the angular distances of the current min and max angles from the center. Their initial
	// value is the lowest possible
	double minDelta = 0.0;
	double maxDelta = 0.0;

	for (int i = 0; i < 4; i++) {
		const double curDelta = fabs(angles[i] - centerAngle);

		// Checking if the vertex and the center are behind the camera (i.e. the raw
		// angular distance exceeds 180°, so the actual distance is its complement)
		if (curDelta > PI_GRECO) {
			const double actualDelta = (2.0 * PI_GRECO) - curDelta;
			if (angles[i] > centerAngle) {
				// This is a candidate minimum angle
				if (actualDelta > minDelta) {
					minAngleID = i;
					minDelta = actualDelta;
				}
			} else {
				// This is a candidate maximum angle
				if (actualDelta > maxDelta) {
					maxAngleID = i;
					maxDelta = actualDelta;
				}
			}
		} else {
			if (angles[i] < centerAngle) {
				// This is a candidate minimum angle
				if (curDelta > minDelta) {
					minAngleID = i;
					minDelta = curDelta;
				}
			} else {
				// This is a candidate maximum angle
				if (curDelta > maxDelta) {
					maxAngleID = i;
					maxDelta = curDelta;
				}
			}
		}
	}

	// Filling the minAngle and maxAngle parameters
	rangesAndColors.clear();
	rangesAndColors.append(AngularRangeAndColor(angles[minAngleID], angles[maxAngleID], color()));

#if defined(__GNUC__) && defined(DEVELOPER_WARNINGS)
	#warning QUESTO MODO DI CALCOLARE LA DISTANZA È SBAGLIATO (E NON NE CAPISCO IL SENSO), MA È QUELLO USATO IN EVOROBOT, QUINDI PER IL MOMENTO LO USO (ANCHE PERCHÉ USARE LA DISTANZA PER L OCCLUSIONE NON VA BENE COMUNQUE)
#endif
	// Now computing distance. This way of calculating the distance is plainly wrong (and I can't
	// see why it is written this way), but it is the method used by Evorobot, so for the moment
	// using it (moreover using distance for occlusion is not correct)
	distance = ((mtr.w_pos - m_vertexes[minAngleID]).norm() + (mtr.w_pos - m_vertexes[maxAngleID]).norm()) / 2.0;

	// Checking that the distance is less than the maximum one
	if (distance > maxDistance) {
		distance = -1.0;
	}
}
661 
662 bool Box2DWrapper::computeDistanceAndOrientationFromRobot(const WheeledRobot2DWrapper& robot, double& distance, double& angle) const
663 {
664 #if defined(__GNUC__) && defined(DEVELOPER_WARNINGS)
665  #warning I CALCOLI PER DISTANZA E ORIENTAMENTO IN EVOROBOT SONO STRANI, QUI HO CERCATO DI FARE QUALCOSA CHE MI SEMBRI SENSATO...
666 #endif
667  // Only doing computations for walls and boxes
668  if ((m_type != Wall) && (m_type != Box)) {
669  return false;
670  }
671 
672  // Taking the robot position and setting z to lie on the same plane of the vertexes
673  const wVector robotPosition(robot.position().x, robot.position().y, m_vertexes[0].z);
674 
675  // Now computing the robot position in the box frame of reference
676  const wVector relRobotPosition = m_box->matrix().untransformVector(robotPosition);
677 
678  // Now we can find the point in the rectangle that is nearest to the robot position. As we work in the box
679  // frame of reference, the vertex are easy to compute. They are (discarding z):
680  // (+m_box->sideX() / 2.0, +m_box->sideY() / 2.0)
681  // (+m_box->sideX() / 2.0, -m_box->sideY() / 2.0)
682  // (-m_box->sideX() / 2.0, +m_box->sideY() / 2.0)
683  // (-m_box->sideX() / 2.0, -m_box->sideY() / 2.0)
684  // Finding the nearest point is just a matter of separately computing x and y.
685  real nearestX;
686  if (relRobotPosition.x < -m_box->sideX() / 2.0) {
687  nearestX = -m_box->sideX() / 2.0;
688  } else if (relRobotPosition.x > +m_box->sideX() / 2.0) {
689  nearestX = +m_box->sideX() / 2.0;
690  } else {
691  nearestX = relRobotPosition.x;
692  }
693  real nearestY;
694  if (relRobotPosition.y < -m_box->sideY() / 2.0) {
695  nearestY = -m_box->sideY() / 2.0;
696  } else if (relRobotPosition.y > +m_box->sideY() / 2.0) {
697  nearestY = +m_box->sideY() / 2.0;
698  } else {
699  nearestY = relRobotPosition.y;
700  }
701 
702  // Although distance is independent of the frame of reference, we convert the nearest point to the global frame
703  // of reference because we only have the robot orientation in that frame
704  const wVector nearestPoint = m_box->matrix().transformVector(wVector(nearestX, nearestY, relRobotPosition.z));
705 
706  // Now we can easily compute the distance and orientation. For the distance we have to remove the robot radius
707  distance = (nearestPoint - robotPosition).norm() - robot.getRadius();
708  const real robotOrientation = (dynamic_cast<const RobotOnPlane*>(robot.wObject()))->orientation(m_arena->getPlane());
709  angle = atan2(nearestPoint.y - robotPosition.y, nearestPoint.x - robotPosition.x) - robotOrientation;
710 
711  return true;
712 }
713 
715  PhyObject2DWrapper(arena),
716  m_cylinder(cylinder),
717  m_type(((type != SmallCylinder) && (type != BigCylinder) && (type != CircularTargetArea)) ? Cylinder : type)
718 {
719  if (m_type == CircularTargetArea) {
720  m_cylinder->setKinematic(true, false);
721  }
722 }
723 
725 {
726  // Nothing to do here
727 }
728 
730 {
731  return m_cylinder;
732 }
733 
735 {
736  return m_cylinder;
737 }
738 
740 {
741  // CircularTargetArea cannot be made non-static
742  if (m_type == CircularTargetArea) {
743  return;
744  }
745 
746  phyObject()->setStatic(s);
747 }
748 
750 {
751  // CircularTargetArea cannot be made non-kinematic
752  if (m_type == CircularTargetArea) {
753  return;
754  }
755 
756  phyObject()->setKinematic(b, c);
757 }
758 
760 {
761  return m_type;
762 }
763 
765 {
766  wVector pos = phyObject()->matrix().w_pos;
767 
768  pos.x = x;
769  pos.y = y;
770 
771  phyObject()->setPosition(pos);
772 }
773 
774 void Cylinder2DWrapper::computeLinearViewFieldOccupiedRange(const wMatrix& cameraMtr, QVector<AngularRangeAndColor>& rangesAndColors, double& distance, double maxDistance) const
775 {
776  // If this is a CircularTargetArea, we simply return a negative distance (it is not visible with
777  // a linear camera)
778  if (m_type == CircularTargetArea) {
779  distance = -1.0;
780  return;
781  }
782 
783  // Getting the vector with cylinder colors
784  computeLinearViewFieldOccupiedRangeForCircle(cameraMtr, m_cylinder->matrix(), m_cylinder->radius(), m_cylinder->segmentsColor().constBegin(), m_cylinder->segmentsColor().constEnd(), rangesAndColors, distance, maxDistance);
785 }
786 
787 bool Cylinder2DWrapper::computeDistanceAndOrientationFromRobot(const WheeledRobot2DWrapper& robot, double& distance, double& angle) const
788 {
789  // If this is a CircularTargetArea, we simply return a negative distance
790  if (m_type == CircularTargetArea) {
791  return false;
792  }
793 
794  const double robotOrientation = (dynamic_cast<const RobotOnPlane*>(robot.wObject()))->orientation(m_arena->getPlane());
795  computeDistanceAndOrientationFromRobotToCylinder(robot.position(), robotOrientation, robot.getRadius(), m_cylinder->radius(), m_cylinder->matrix().w_pos, distance, angle);
796 
797  return true;
798 }
799 
800 
// Constructs the wrapper for a sphere in the arena. The type parameter is
// currently ignored because spheres can only be light bulbs
Sphere2DWrapper::Sphere2DWrapper(Arena* arena, PhySphere* sphere, Type /*type*/) :
	PhyObject2DWrapper(arena),
	m_sphere(sphere),
	m_type(LightBulb) // For the moment type can only be LightBulb
{
	// Light bulbs do not take part in the physics simulation, so the sphere is
	// made kinematic right away
	m_sphere->setKinematic(true);
}
808 
810 {
811  // Nothing to do here
812 }
813 
815 {
816  return m_sphere;
817 }
818 
820 {
821  return m_sphere;
822 }
823 
825 {
826  // The sphere is always static, this does nothing
827 }
828 
// Intentionally a no-op: the sphere is made kinematic in the constructor and
// must stay that way, so requests to change the kinematic state are ignored
void Sphere2DWrapper::setKinematic(bool /*b*/, bool /*c*/)
{
	// The sphere is always kinematic, this does nothing
}
833 
835 {
836  return m_type;
837 }
838 
840 {
841  wVector pos = phyObject()->matrix().w_pos;
842 
843  pos.x = x;
844  pos.y = y;
845 
846  phyObject()->setPosition(pos);
847 }
848 
849 void Sphere2DWrapper::computeLinearViewFieldOccupiedRange(const wMatrix& /*cameraMtr*/, QVector<AngularRangeAndColor>& /*rangesAndColors*/, double& distance, double /*maxDistance*/) const
850 {
851  // Light bulbs are not seen by the linear camera
852  distance = -1.0;
853  return;
854 }
855 
856 bool Sphere2DWrapper::computeDistanceAndOrientationFromRobot(const WheeledRobot2DWrapper& robot, double& distance, double& angle) const
857 {
858  wVector robotPosition = robot.position();
859  robotPosition.z = 0.0;
860  wVector position = m_sphere->matrix().w_pos;
861  position.z = 0.0;
862 
863  distance = (position - robotPosition).norm() - robot.getRadius() - m_sphere->radius();
864 
865  const double robotOrientation = (dynamic_cast<const RobotOnPlane*>(robot.wObject()))->orientation(m_arena->getPlane());
866  angle = atan2(position.y - robotPosition.y, position.x - robotPosition.x) - robotOrientation;
867 
868  return true;
869 }
870 
// Wraps a robot living on the arena plane. The radius is used throughout this
// file as the radius of the circle approximating the robot body in 2D
// computations; height is stored but presumably describes the robot body
// height — its use is outside this chunk, TODO confirm.
WheeledRobot2DWrapper::WheeledRobot2DWrapper(Arena* arena, RobotOnPlane* robot, double height, double radius) :
	PhyObject2DWrapper(arena),
	m_robot(robot),
	m_height(height),
	m_radius(radius)
{
}
878 
880 {
881 }
882 
884 {
885  return m_robot;
886 }
887 
889 {
890  return m_robot;
891 }
892 
894 {
895  return dynamic_cast<WObject*>(m_robot);
896 }
897 
899 {
900  return dynamic_cast<const WObject*>(m_robot);
901 }
902 
904 {
905  return NULL;
906 }
907 
909 {
910  return NULL;
911 }
912 
914 {
915  return WheeledRobot;
916 }
917 
919 {
920  wVector pos = wObject()->matrix().w_pos;
921 
922  pos.x = x;
923  pos.y = y;
924 
925  wObject()->setPosition(pos);
926 }
927 
// Computes the angular range of a linear camera occupied by this robot,
// treating the robot body as a (possibly multi-colored) circle of radius
// m_radius; the actual geometry is delegated to
// computeLinearViewFieldOccupiedRangeForCircle().
void WheeledRobot2DWrapper::computeLinearViewFieldOccupiedRange(const wMatrix& cameraMtr, QVector<AngularRangeAndColor>& rangesAndColors, double& distance, double maxDistance) const
{
	// NOTE(review): the robot frame is rotated by -90 degrees around its local
	// Y axis, presumably to match the axes convention expected by the helper
	// (the same convention used for cylinders) — TODO confirm
	const wMatrix mtr = wObject()->matrix().rotateAround(wObject()->matrix().y_ax, wObject()->matrix().w_pos, -PI_GRECO / 2.0);

	// Getting the vector with cylinder colors. For the moment only the MarXbot can have multiple colors
#if defined(__GNUC__) && defined(DEVELOPER_WARNINGS)
	// (Italian, roughly: "this stuff is ugly, the code for wheeled simulations is getting tangled")
	#warning QUESTA ROBA È BRUTTA, IL CODICE PER LE SIMULAZIONI CON I WHEELED SI STA INGARBUGLIANDO...
#endif
	PhyMarXbot* marxbot = dynamic_cast<PhyMarXbot*>(m_robot);
	if (marxbot == NULL) {
		// Not a MarXbot: the whole body has a single color, so a single
		// full-circle segment is built on the stack.
		// This cast should never fail!
		WObject* r = dynamic_cast<WObject*>(m_robot);
		PhyCylinder::SegmentColor s(SimpleInterval(-PI_GRECO, PI_GRECO), r->color());
		// &s and (&s + 1) form a begin/one-past-the-end iterator pair over the single segment
		computeLinearViewFieldOccupiedRangeForCircle(cameraMtr, mtr, m_radius, &s, (&s + 1), rangesAndColors, distance, maxDistance);
	} else {
		// MarXbot bodies expose per-segment colors
		computeLinearViewFieldOccupiedRangeForCircle(cameraMtr, mtr, m_radius, marxbot->segmentsColor().constBegin(), marxbot->segmentsColor().constEnd(), rangesAndColors, distance, maxDistance);
	}
}
946 
947 bool WheeledRobot2DWrapper::computeDistanceAndOrientationFromRobot(const WheeledRobot2DWrapper& robot, double& distance, double& angle) const
948 {
949  if (this == &robot) {
950  return false;
951  }
952 
953  const double robotOrientation = (dynamic_cast<const RobotOnPlane*>(robot.wObject()))->orientation(m_arena->getPlane());
954  computeDistanceAndOrientationFromRobotToCylinder(robot.position(), robotOrientation, robot.getRadius(), m_radius, position(), distance, angle);
955 
956  return true;
957 }
958 
959 wVector positionOnPlane(const Box2DWrapper* plane, real x, real y)
960 {
961  wVector pos = plane->position();
962 
963  pos.x = x;
964  pos.y = y;
965  pos.z += plane->phyObject()->sideZ() / 2.0f;
966 
967  return pos;
968 }
969 
970 void orientationOnPlane(const Box2DWrapper* plane, real angle, wMatrix& mtr)
971 {
972  wMatrix rotatedMtr = plane->phyObject()->matrix();
973 
974  // Now rotating the matrix around the Z axis
975  rotatedMtr = rotatedMtr.rotateAround(rotatedMtr.z_ax, rotatedMtr.w_pos, angle);
976 
977  // Setting the position of the rotated matrix to be the same as the original one
978  rotatedMtr.w_pos = mtr.w_pos;
979 
980  // Now overwriting the matrix
981  mtr = rotatedMtr;
982 }
983 
984 real angleBetweenXAxes(const wMatrix& mtr1, const wMatrix& mtr2)
985 {
986  // Taking the two x axes. We can take the x axis of mtr1 as is, while we need to project the X axis
987  // of mtr2 onto the XY plane of mtr1. To do so we simply project the x axis of mtr2 on the z axis of
988  // mtr1 and subtract this vector from the original x axis of mtr2
989  const wVector& x1 = mtr1.x_ax;
990  const wVector x2 = mtr2.x_ax - mtr1.z_ax.scale(mtr2.x_ax % mtr1.z_ax);
991 
992  // Now normalizing both axes
993  const wVector normX1 = x1.scale(1.0 / x1.norm());
994  const wVector normX2 = x2.scale(1.0 / x2.norm());
995 
996  // To get the angle (unsigned), computing the acos of the dot product of the two vectors
997  const double unsignedAngle = acos(normX1 % normX2);
998 
999  // Now choosing the right sign. To do this we first compute the cross product of the two x axes and
1000  // then we see if it has the same direction of the z axis of the first matrix or not
1001  const double s = mtr1.z_ax % (normX1 * normX2);
1002  return (s < 0.0) ? -unsignedAngle : unsignedAngle;
1003 }
1004 
1005 } // end namespace farsa