sensors.cpp
1 /********************************************************************************
2  * FARSA Experiments Library *
3  * Copyright (C) 2007-2012 *
4  * Gianluca Massera <emmegian@yahoo.it> *
5  * Stefano Nolfi <stefano.nolfi@istc.cnr.it> *
6  * Tomassino Ferrauto <tomassino.ferrauto@istc.cnr.it> *
7  * Onofrio Gigliotta <onofrio.gigliotta@istc.cnr.it> *
8  * *
9  * This program is free software; you can redistribute it and/or modify *
10  * it under the terms of the GNU General Public License as published by *
11  * the Free Software Foundation; either version 2 of the License, or *
12  * (at your option) any later version. *
13  * *
14  * This program is distributed in the hope that it will be useful, *
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
17  * GNU General Public License for more details. *
18  * *
19  * You should have received a copy of the GNU General Public License *
20  * along with this program; if not, write to the Free Software *
21  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *
22  ********************************************************************************/
23 
24 #include "sensors.h"
25 #include "configurationhelper.h"
26 #include "motorcontrollers.h"
27 #include "logger.h"
28 #include "graphicalwobject.h"
29 #include "arena.h"
30 #include <QStringList>
31 #include <QList>
32 #include <QVector>
33 #include <QtAlgorithms>
34 #include <limits>
35 #include <cmath>
36 #include <QLinkedList>
37 #include <QFile>
38 #include <QTextStream>
39 
40 namespace farsa {
41 
// FakeSensor constructor — the signature line is missing from this listing
// (presumably FakeSensor::FakeSensor(ConfigurationParameters& params, QString prefix)).
// Reads the number of extra inputs and the two resource names from the
// configuration, resolving the resource names for multirobot setups.
43  Sensor(params, prefix),
// m_additionalInputs is sized with the configured number of inputs (default 1)
44  m_additionalInputs(ConfigurationHelper::getUnsignedInt(params, prefix + "additionalInputs", 1)),
45  m_neuronsIteratorResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator"))),
46  m_additionalInputsResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "additionalInputsResource", "additionalInputs"))),
47  m_neuronsIterator(NULL)
48 {
// NOTE(review): listing line 49 is missing here (probably the declaration of
// the usable resources) — confirm against the repository copy.
50 
// Zero-initialize all additional inputs.
// NOTE(review): i is unsigned while QVector::size() returns int — harmless
// here but a signed/unsigned comparison warning source.
51  for (unsigned int i = 0; i < m_additionalInputs.size(); i++) {
52  m_additionalInputs[i] = 0.0;
53  }
54 }
55 
// FakeSensor destructor — signature line (listing line 56) missing.
57 {
58  // Removing resources
59  try {
// NOTE(review): listing line 60 is missing here (presumably the call that
// removes/deletes the declared resource) — confirm against the repository copy.
61  } catch (...) {
62  // Doing nothing, this is here just to prevent throwing an exception from the destructor
63  }
64 }
65 
66 void FakeSensor::save(ConfigurationParameters& params, QString prefix)
67 {
68  Sensor::save( params, prefix );
69  params.startObjectParameters(prefix, "FakeSensor", this);
70  params.createParameter(prefix, "additionalInputs", QString::number(m_additionalInputs.size()));
71  params.createParameter(prefix, "neuronsIterator", m_neuronsIteratorResource);
72  params.createParameter(prefix, "additionalInputsResource", m_additionalInputsResource);
73 }
74 
75 void FakeSensor::describe(QString type)
76 {
77  Sensor::describe(type);
78 
79  Descriptor d = addTypeDescription(type, "Adds input neurons that can be used for custom operations", "With this sensor you can specify how many additional inputs are needed in the controller. This also declares a resource that can be used to access the additional inputs");
80  d.describeInt("additionalInputs").def(1).limits(1,100).props(IsMandatory).help("The number of additional inputs that will be added to the controller (default 1)");
81  d.describeString("neuronsIterator").def("neuronsIterator").help("The name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
82  d.describeString("additionalInputsResource").def("additionalInputs").help("The name of the resource associated with the vector of additional inputs (default is \"additionalInputs\")");
83 }
84 
// FakeSensor::update() — the signature line (listing line 85) and several
// statements (listing lines 88, 93, 95: presumably the resource-existence
// check, setCurrentBlock() and the setInput() call in the loop) are missing
// from this listing — confirm against the repository copy.
86 {
87  // Checking all resources we need exist
89 
// Lock resources for the whole scope (RAII-style locker)
90  ResourcesLocker locker(this);
91 
92  // Copying the output inside the vector of additional outputs
94  for (unsigned int i = 0; i < m_additionalInputs.size(); i++, m_neuronsIterator->nextNeuron()) {
96  }
97 }
98 
// FakeSensor::size() — returns the number of input neurons handled by this
// sensor (the signature line, listing line 99, is missing).
100 {
101  return m_additionalInputs.size();
102 }
103 
// Resource-declaration hook — signature and actual statements (listing lines
// 104, 107, 110) are missing; the surviving comments indicate it calls the
// parent function and then declares the additional-inputs resource.
105 {
106  // Calling parent function
108 
109  // Now declaring our resource
111 }
112 
// Reacts to changes of the resources this sensor observes: (re)acquires the
// neurons iterator and labels the extra inputs in the network view.
113 void FakeSensor::resourceChanged(QString resourceName, ResourceChangeType changeType)
114 {
115  if (changeType == Deleted) {
// NOTE(review): the statement originally at listing line 116 is missing here.
117  return;
118  }
119 
120  if (resourceName == m_neuronsIteratorResource) {
121  m_neuronsIterator = getResource<NeuronsIterator>();
// NOTE(review): listing line 122 is missing — presumably
// m_neuronsIterator->setCurrentBlock(name()); confirm against the repository.
123  for(int i = 0; i < size(); i++, m_neuronsIterator->nextNeuron()) {
// Label the extra inputs "Fk0", "Fk1", ... with range [0,1] in the GUI
124  m_neuronsIterator->setGraphicProperties("Fk" + QString::number(i), 0.0, 1.0, Qt::red);
125  }
126  } else if (resourceName != m_additionalInputsResource) {
127  Logger::info("Unknown resource " + resourceName + " for " + name());
128  }
129 }
130 
131 //ObjectPositionSensor : begin implementation
132 // it returns the absolute coordinate of an object into the world
// ObjectPositionSensor constructor — the signature line (listing line 133) is
// missing; presumably ObjectPositionSensor::ObjectPositionSensor(
// ConfigurationParameters& params, QString prefix). Reads the tracked object
// name, the neurons iterator resource name and the optional bounding box used
// to linearize positions into [0,1].
134  Sensor(params, prefix) {
135  neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator");
136  objectName = ConfigurationHelper::getString( params, prefix+"object", "object" );
// bbMin/bbMax are only honored when BOTH are fully specified 3D points
137  QVector<double> vec1 = ConfigurationHelper::getVector( params, prefix+"bbMin" );
138  QVector<double> vec2 = ConfigurationHelper::getVector( params, prefix+"bbMax" );
139  if ( vec1.size() == 3 && vec2.size() == 3 ) {
140  linearize = true;
141  bbMin = wVector( vec1[0], vec1[1], vec1[2] );
142  bbMax = wVector( vec2[0], vec2[1], vec2[2] );
143  } else {
144  linearize = false;
// Warn only if the user provided a partial/malformed bounding box
145  if ( ! (vec1.isEmpty() && vec2.isEmpty()) ) {
146  Logger::warning( QString("ObjectPositionSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
147  }
148  }
149 
150  // Declaring the resources that are needed here
151  usableResources( QStringList() << objectName << neuronsIteratorResource );
152 }
153 
// ObjectPositionSensor destructor — signature line (listing line 154) missing.
155  // nothing to do
156 }
157 
158 void ObjectPositionSensor::describe( QString type ) {
159  Sensor::describe( type );
160  Descriptor d = addTypeDescription( type, "Sensor for reading the three absolute coordinate (position into the worlf frame) of an object" );
161  d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
162  d.describeString( "object" ).def( "object" ).props( IsMandatory ).help( "The name of the resource associated with the object to track with this sensor" );
163  d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used for linearize the object position into [0,1]" );
164  d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used for linearize the object position into [0,1]" );
165 }
166 
// ObjectPositionSensor::update() — the signature line (listing line 167) and
// the resource-existence check statement (listing line 169) are missing from
// this listing. Feeds the tracked object's world position into three input
// neurons, optionally linearized into [0,1] via the configured bounding box.
168  // Checking all resources we need exist
170 
171  // Acquiring the lock to get resources
172  ResourcesLocker locker( this );
173 
174  WObject* object = getResource<WObject>( objectName );
// World-frame position of the tracked object
175  wVector pos = object->matrix().w_pos;
176  NeuronsIterator* evonetIt = getResource<NeuronsIterator>( neuronsIteratorResource );
177  evonetIt->setCurrentBlock( name() );
// One neuron per coordinate (x, y, z)
178  for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
179  if ( linearize ) {
180  // linearize into [0,1]
181  evonetIt->setInput( linearMap( pos[i], bbMin[i], bbMax[i], 0, 1 ) );
182  } else {
183  evonetIt->setInput( pos[i] );
184  }
185  }
186 }
187 
// ObjectPositionSensor::size() — signature line (listing line 188) missing;
// this sensor always occupies exactly three input neurons.
189  return 3;
190 }
191 
// Reacts to resource changes: configures the graphic properties of the three
// input neurons when the neurons iterator becomes available.
192 void ObjectPositionSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
193  if (changeType == Deleted) {
// NOTE(review): the statement originally at listing line 194 is missing here.
195  return;
196  }
197 
198  if (resourceName == objectName) {
199  // Nothing to do here, we get the object with getResource() in update()
200  } else if (resourceName == neuronsIteratorResource) {
201  NeuronsIterator* evonetIt = getResource<NeuronsIterator>();
202  evonetIt->setCurrentBlock( name() );
// Label the three neurons "obj0".."obj2"; [-10,10] is the display range
203  for( int i=0; i<3; i++, evonetIt->nextNeuron() ) {
204  evonetIt->setGraphicProperties( QString("obj")+QString::number(i), -10.0, 10.0, Qt::red );
205  }
206  } else {
207  Logger::info("Unknown resource " + resourceName + " for " + name());
208  }
209 }
210 
// ObjectPositionSensor::save() — the signature line (listing line 211) is
// missing; presumably void ObjectPositionSensor::save(ConfigurationParameters&
// params, QString prefix). Writes back the parameters read by the constructor;
// bbMin/bbMax are saved only when linearization is active.
212 {
213  Sensor::save( params, prefix );
214  params.startObjectParameters( prefix, "ObjectPositionSensor", this );
215  params.createParameter(prefix, "neuronsIterator", neuronsIteratorResource);
216  params.createParameter( prefix, "object", objectName );
217  if ( linearize ) {
// Serialized as three space-separated components, mirroring getVector()
218  params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
219  params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
220  }
221 }
222 //ObjectPositionSensor : end implementation
223 
224 namespace LinearCameraOld {
225  namespace __LinearCamera_internal {
226  #ifndef GLMultMatrix
227  #define GLMultMatrix glMultMatrixf
228  // for double glMultMatrixd
229  #endif
230 
234  const float linearCameraCubeSide = 0.02f;
235 
240  const float linearCameraReceptorsLength = 0.1f;
241 
246  {
247  public:
// Graphical representation of the (old) linear camera. The constructor
// attaches this object to the camera owner; note that listing lines 267 and
// 274 are missing here (line 267 presumably initializes m_receptorRange,
// line 274 presumably disables use of the owner's color/texture) — confirm
// against the repository copy.
261  LinearCameraGraphic(WObject *object, const wMatrix& transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name = "unamed") :
262  GraphicalWObject(object->world(), name),
263  m_object(object),
264  m_minAngle(minAngle),
265  m_maxAngle(maxAngle),
266  m_numReceptors(numReceptors),
// All receptors start out black until setPerceivedColors() is called
268  m_receptors(m_numReceptors, Qt::black)
269  {
270  // Attaching to object (which also becomes our owner)
271  attachToObject(m_object, true, transformation);
272 
273  // We also use our own color and texture
275  setTexture("");
276  setColor(Qt::white);
277  }
278 
// Destructor — signature line (listing line 282) missing; nothing to do.
283  {
284  }
285 
// Thread-safe setter for the colors currently perceived by the receptors;
// called from the simulation thread while render() runs in the GUI thread,
// hence the mutex around the vector assignment.
292  void setPerceivedColors(const QVector<QColor>& receptors)
293  {
294  m_receptorsMutex.lock();
295  m_receptors = receptors;
296  m_receptorsMutex.unlock();
297  }
298 
299  protected:
// Renders the camera with immediate-mode OpenGL: a small cube for the camera
// body, white separator lines between receptors, and one three-patch
// (red/green/blue components) fan per receptor showing its perceived color.
// The GL calls are strictly order-dependent (matrix push/pop, begin/end,
// attribute push/pop), so the body is left untouched.
307  virtual void render(RenderWObject* renderer, QGLContext* gw)
308  {
309  // Bringing the frame of reference at the center of the camera
310  glPushMatrix();
311  renderer->container()->setupColorTexture(gw, renderer);
312  GLMultMatrix(&tm[0][0]);
313 
314  // First of all drawing the camera as a small white box. The face in the
315  // direction of view (X axis) is painted half green: the green part is the
316  // one in the direction of the upvector (Z axis)
317  glBegin(GL_QUADS);
318  const float hside = linearCameraCubeSide / 2.0;
319 
320  // front (top part)
321  glColor3f(0.0, 1.0, 0.0);
322  glNormal3f(1.0, 0.0, 0.0);
323  glVertex3f( hside, -hside, hside);
324  glVertex3f( hside, -hside, 0.0);
325  glVertex3f( hside, hside, 0.0);
326  glVertex3f( hside, hside, hside);
327 
328  // front (bottom part)
329  glColor3f(1.0, 1.0, 1.0);
330  glNormal3f(1.0, 0.0, 0.0);
331  glVertex3f( hside, -hside, 0.0);
332  glVertex3f( hside, -hside, -hside);
333  glVertex3f( hside, hside, -hside);
334  glVertex3f( hside, hside, 0.0);
335 
336  // back
337  glNormal3f(-1.0, 0.0, 0.0);
338  glVertex3f(-hside, -hside, -hside);
339  glVertex3f(-hside, -hside, hside);
340  glVertex3f(-hside, hside, hside);
341  glVertex3f(-hside, hside, -hside);
342 
343  // top
344  glNormal3f(0.0, 1.0, 0.0);
345  glVertex3f(-hside, hside, hside);
346  glVertex3f( hside, hside, hside);
347  glVertex3f( hside, hside, -hside);
348  glVertex3f(-hside, hside, -hside);
349 
350  // bottom
351  glNormal3f(0.0, -1.0, 0.0);
352  glVertex3f(-hside, -hside, -hside);
353  glVertex3f( hside, -hside, -hside);
354  glVertex3f( hside, -hside, hside);
355  glVertex3f(-hside, -hside, hside);
356 
357  // right
358  glNormal3f(0.0, 0.0, 1.0);
359  glVertex3f( hside, -hside, hside);
360  glVertex3f(-hside, -hside, hside);
361  glVertex3f(-hside, hside, hside);
362  glVertex3f( hside, hside, hside);
363 
364  // left
365  glNormal3f(0.0, 0.0, -1.0);
366  glVertex3f( hside, -hside, -hside);
367  glVertex3f(-hside, -hside, -hside);
368  glVertex3f(-hside, hside, -hside);
369  glVertex3f( hside, hside, -hside);
370 
371  glEnd();
372 
373  // Now we draw white lines to separare the various sectors of the camera
374  // Disabling lighting here (we want pure lines no matter from where we look at them)
375  glPushAttrib(GL_LIGHTING_BIT);
376  glDisable(GL_LIGHTING);
377  glLineWidth(2.5);
378  glColor3f(1.0, 1.0, 1.0);
379 
380  // Drawing the lines
// <= on purpose: n receptors have n+1 boundary lines
381  glBegin(GL_LINES);
382  for (unsigned int i = 0; i <= m_numReceptors; i++) {
383  const double curAngle = m_minAngle + double(i) * m_receptorRange;
384 
385  const wVector lineEnd = wVector(cos(curAngle), sin(curAngle), 0.0).scale(linearCameraReceptorsLength);
386 
387  glVertex3f(0.0, 0.0, 0.0);
388  glVertex3f(lineEnd.x, lineEnd.y, lineEnd.z);
389  }
390  glEnd();
391 
392  // Now drawing the state of receptors. Here we also have to lock the semaphore for
393  // the m_receptors vector
394  m_receptorsMutex.lock();
395 
396  // Drawing the status
397  glBegin(GL_QUADS);
398  glNormal3f(0.0, 1.0, 0.0);
// Each receptor is split in three angular patches: red, green, blue channel
399  const double colorPatchAngle = m_receptorRange / 3.0;
400  const double colorPatchMinLength = linearCameraReceptorsLength / 3.0;
401  const double colorPatchMaxLength = 2.0 * linearCameraReceptorsLength / 3.0;
402  for (unsigned int i = 0; i < m_numReceptors; i++) {
403  const double curAngle = m_minAngle + double(i) * m_receptorRange;
404 
405  for (unsigned int c = 0; c < 3; c++) {
406  const double startAngle = curAngle + double(c) * colorPatchAngle;
407  const double endAngle = curAngle + double(c + 1) * colorPatchAngle;
408 
409  // Computing the four vertexes
410  const wVector v1 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMinLength);
411  const wVector v2 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMaxLength);
412  const wVector v3 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMaxLength);
413  const wVector v4 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMinLength);
414 
415  // Setting the color
416  switch (c) {
417  case 0:
418  glColor3f(m_receptors[i].redF(), 0.0, 0.0);
419  break;
420  case 1:
421  glColor3f(0.0, m_receptors[i].greenF(), 0.0);
422  break;
423  case 2:
424  glColor3f(0.0, 0.0, m_receptors[i].blueF());
425  break;
426  default:
427  break;
428  }
429 
430  // Drawing the patch
431  glVertex3f(v1.x, v1.y, v1.z);
432  glVertex3f(v2.x, v2.y, v2.z);
433  glVertex3f(v3.x, v3.y, v3.z);
434  glVertex3f(v4.x, v4.y, v4.z);
435  }
436  }
437  glEnd();
438  m_receptorsMutex.unlock();
439 
440  // Restoring lighting status
441  glPopAttrib();
442 
443  glPopMatrix();
444  }
445 
// Member declarations of the (old) LinearCameraGraphic class. Several listing
// lines are missing here (e.g. the m_object declaration around line 446-453
// and the mutex declaration around lines 477-484) — confirm against the
// repository copy.
450 
// Lower bound of the camera aperture (radians)
454  const double m_minAngle;
455 
// Upper bound of the camera aperture (radians)
459  const double m_maxAngle;
460 
464  const unsigned int m_numReceptors;
465 
// Angular width of a single receptor
471  const double m_receptorRange;
472 
// Colors currently shown for each receptor; written by setPerceivedColors()
476  QVector<QColor> m_receptors;
477 
485  };
486  }
487 
488  using namespace __LinearCamera_internal;
489 
// Old LinearCamera constructor. The base-class initializer originally at
// listing line 491 is missing here — confirm against the repository copy.
// The aperture is clamped to [0, 2*PI_GRECO] and centered around zero
// (m_apertureMin/m_apertureMax), then split evenly among the receptors.
490  LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor) :
492  m_receptors(numReceptors),
493  m_object(obj),
494  m_transformation(mtr),
// Clamp aperture into [0, 2*pi]
495  m_aperture((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture)),
496  m_numReceptors(numReceptors),
497  m_maxDistance(maxDistance),
498  m_backgroundColor(backgroundColor),
499  m_apertureMin(-m_aperture / 2.0),
500  m_apertureMax(m_aperture / 2.0),
501  m_receptorRange(m_aperture / double(m_numReceptors)),
502  m_arena(NULL),
503  m_drawCamera(false),
504  m_ignoreWalls(false),
505  m_graphicalCamera(NULL)
506 
507  {
508  // Stating which resources we use here
509  addUsableResource("arena");
510  }
511 
// Destructor — signature line (listing line 512) missing.
513  {
514  // Nothing to do here
515  }
516 
517  namespace {
518  // This namespace contains some structures used in the LinearCamera::update() function
519 
520  // The structure containing a color and the range of the camera field hit by this color.
521  // It also contains the distance from the camera for ordering.
522  struct ColorRangeAndDistance
523  {
524  ColorRangeAndDistance() :
525  color(),
526  minAngle(0.0),
527  maxAngle(0.0),
528  distance(0.0)
529  {
530  }
531 
532  ColorRangeAndDistance(QColor c, double min, double max, double d) :
533  color(c),
534  minAngle(min),
535  maxAngle(max),
536  distance(d)
537  {
538  }
539 
540  // This is to order objects of this type
541  bool operator<(const ColorRangeAndDistance& other) const
542  {
543  return (distance < other.distance);
544  }
545 
546  QColor color;
547  double minAngle;
548  double maxAngle;
549  double distance;
550  };
551 
552  // An helper structure memorizing information about colors in a single receptor. minAngle and maxAngle
553  // are used to store the current portion of the receptor for which we already know the color, while
554  // colorsAndFractions is the list of colors and the portion of the receptor occupied by that color
555  struct ColorsInReceptor
556  {
557  Intervals curInterval;
558 
559  struct ColorAndFraction {
560  ColorAndFraction() :
561  color(),
562  fraction(0.0)
563  {
564  }
565 
566  ColorAndFraction(QColor c, double f) :
567  color(c),
568  fraction(f)
569  {
570  }
571 
572  QColor color;
573  double fraction;
574  };
575  QList<ColorAndFraction> colorsAndFractions;
576  };
577  }
578 
// LinearCamera::update() — the signature line (listing line 579, presumably
// void LinearCamera::update()) is missing. Computes the color perceived by
// each receptor: collects per-object angular ranges, sorts them by distance
// (nearest wins via interval subtraction), appends the background at infinite
// distance, then blends each receptor's colors weighted by covered fraction.
// The interval arithmetic is order-sensitive, so the body is left untouched.
580  {
581  // Getting the list of objects from the arena (if we have the pointer to the arena)
582  if (m_arena == NULL) {
583  m_receptors.fill(m_backgroundColor);
584 
585  return;
586  }
587  const QVector<PhyObject2DWrapper*>& objectsList = m_arena->getObjects();
588 
589  // If no object is present, we can fill the receptors list with background colors and return
590  if (objectsList.size() == 0) {
591  m_receptors.fill(m_backgroundColor);
592 
593  return;
594  }
595 
596  // Updating the matrix with the current camera position
597  wMatrix currentMtr = m_transformation * m_object->matrix();
598 
599  // First of all we need to compute which color hits each receptor
600 
601  // Now filling the list with colors, ranges and distances. If an object is perceived at the
602  // extremities of the aperture, it is split in two different ColorRangeAndDistance objects
603  QList<ColorRangeAndDistance> colorsRangesAndDistances;
604 
605  // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
606  // this is not correct (occlusion doesn't work well) and so should be changed
607  for (int i = 0; i < objectsList.size(); i++) {
608  // Checking if we have to ignore a wall
609  if (m_ignoreWalls && (objectsList[i]->type() == PhyObject2DWrapper::Wall)) {
610  continue;
611  } else if (m_object == objectsList[i]->wObject()) {
612  // Skipping checks with self
613  continue;
614  }
615 
616  QVector<PhyObject2DWrapper::AngularRangeAndColor> rangesAndColors;
617  double distance;
618  objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, rangesAndColors, distance, m_maxDistance);
619 
620  // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
621  if ((distance < 0.0) || (distance > m_maxDistance)) {
622  continue;
623  }
624 
625  for (int j = 0; j < rangesAndColors.size(); j++) {
626  // To safely compare with the aperture, we have to convert angles between -PI_GRECO and PI_GRECO
627  const double minAngle = normalizeRad(rangesAndColors[j].minAngle);
628  const double maxAngle = normalizeRad(rangesAndColors[j].maxAngle);
629  const QColor color = rangesAndColors[j].color;
630 
631  // If the minAngle is greater than the maxAngle, splitting in two, so that we do not have to
632  // make special cases in the subsequent part of the function. Here we also check if the object
633  // is completely outside the view field or not (in the first case we don't add it to the list)
634  // We just check if the object is at least partially visible, we don't set the limits to be
635  // within the view field
636  if (minAngle > maxAngle) {
637  if ((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) {
638  colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, m_apertureMax, distance));
639  }
640  if ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax)) {
641  colorsRangesAndDistances.append(ColorRangeAndDistance(color, m_apertureMin, maxAngle, distance));
642  }
643  } else {
644  if (((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) || ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax))) {
645  colorsRangesAndDistances.append(ColorRangeAndDistance(color, max(minAngle, m_apertureMin), min(maxAngle, m_apertureMax), distance));
646  }
647  }
648  }
649  }
650 
651  // Ordering colors by distance from the camera
652  qSort(colorsRangesAndDistances);
653 
654  // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
655  // the whole field with a valid color
656  colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, m_apertureMin, m_apertureMax, std::numeric_limits<double>::infinity()));
657 
658  // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
659  // in each receptor
660  QVector<ColorsInReceptor> colorsInReceptors(m_numReceptors);
661  for (QList<ColorRangeAndDistance>::const_iterator it = colorsRangesAndDistances.begin(); it != colorsRangesAndDistances.end(); ++it) {
662  // Computing the index of receptors which are interested by this color
663  const int minIndex = max(0.0, floor((it->minAngle - m_apertureMin) / m_receptorRange));
664  const int maxIndex = min(double(m_numReceptors - 1), floor((it->maxAngle - m_apertureMin) / m_receptorRange));
665 
666  // Now cycling over the computed receptors in the colorsInReceptors list to fill it
667  for (int i = minIndex; i <= maxIndex; i++) {
668  const double receptorMin = m_apertureMin + m_receptorRange * double(i);
669  const double receptorMax = m_apertureMin + m_receptorRange * double(i + 1);
670  const double initLength = receptorMax - receptorMin;
671  if (colorsInReceptors[i].colorsAndFractions.size() == 0) {
672  // This is the first color in the receptor, we have to initialize the interval
673  colorsInReceptors[i].curInterval.unite(SimpleInterval(receptorMin, receptorMax));
674  }
675 
// Fraction = part of the receptor this color removes from the uncovered interval
676  const double curLength = colorsInReceptors[i].curInterval.length();
677  colorsInReceptors[i].curInterval.subtract(SimpleInterval(it->minAngle, it->maxAngle));
678  const double newLength = colorsInReceptors[i].curInterval.length();
679  const double fraction = min(1.0, (curLength - newLength) / initLength);
680  colorsInReceptors[i].colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(it->color, fraction));
681  }
682  }
683 
684  // The final step is to compute the resulting color for each receptor. See class description for a comment
685  // on this procedure
686  for (unsigned int i = 0; i < m_numReceptors; i++) {
687  double red = 0.0;
688  double green = 0.0;
689  double blue = 0.0;
690  for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorsInReceptors[i].colorsAndFractions.begin(); it != colorsInReceptors[i].colorsAndFractions.end(); ++it) {
691  red += it->color.redF() * it->fraction;
692  green += it->color.greenF() * it->fraction;
693  blue += it->color.blueF() * it->fraction;
694  }
// NOTE(review): 1.0f/0.0f literals are mixed with double accumulators here —
// relies on the project's min/max templates accepting mixed types; confirm.
695  m_receptors[i] = QColor::fromRgbF(min(1.0f, max(0.0f, red)), min(1.0f, max(0.0f, green)), min(1.0f, max(0.0f, blue)));
696  }
697 
698  // Updating graphics if we have to
699  if (m_drawCamera) {
700  m_graphicalCamera->setPerceivedColors(m_receptors);
701  }
702  }
703 
// Enables/disables the graphical representation of the camera — the signature
// line (listing line 704) is missing; presumably void LinearCamera::drawCamera(bool d).
705  {
706  if (m_drawCamera == d) {
707  return;
708  }
709 
710  m_drawCamera = d;
711  if (m_drawCamera) {
712  m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_apertureMin, m_apertureMax, m_numReceptors, "linearCamera");
713  } else {
// NOTE(review): m_graphicalCamera is not reset to NULL after delete — the
// member keeps a dangling pointer until the camera is re-enabled. Harmless
// as long as it is only dereferenced when m_drawCamera is true, but fragile.
714  delete m_graphicalCamera;
715  }
716  }
717 
718  void LinearCamera::resourceChanged(QString resourceName, ResourceChangeType changeType)
719  {
720  if (resourceName == "arena") {
721  switch (changeType) {
722  case Created:
723  case Modified:
724  m_arena = getResource<Arena>();
725  break;
726  case Deleted:
727  m_arena = NULL;
728  break;
729  }
730  } else {
731  Logger::info("Unknown resource " + resourceName + " (in LinearCamera)");
732  }
733  }
734 }
735 
736 namespace LinearCameraNew {
737  namespace __LinearCamera_internal {
738  #ifndef GLMultMatrix
739  #define GLMultMatrix glMultMatrixf
740  // for double glMultMatrixd
741  #endif
742 
746  const float linearCameraCubeSide = 0.02f;
747 
752  const float linearCameraReceptorsLength = 0.1f;
753 
758  {
759  public:
// Graphical representation of the (new) linear camera, parameterized by a
// list of receptor angular ranges instead of a uniform aperture. Listing
// lines 778 and 784 are missing here (778 presumably a further initializer —
// note the trailing comma on line 777 — and 784 presumably disables use of
// the owner's color/texture); confirm against the repository copy.
773  LinearCameraGraphic(WObject *object, const wMatrix& transformation, QVector<SimpleInterval> receptorsRanges, QString name = "unamed") :
774  GraphicalWObject(object->world(), name),
775  m_object(object),
776  m_receptorsRanges(receptorsRanges),
// One entry per receptor, all black until setPerceivedColors() is called
777  m_receptors(m_receptorsRanges.size(), Qt::black),
779  {
780  // Attaching to object (which also becomes our owner)
781  attachToObject(m_object, true, transformation);
782 
783  // We also use our own color and texture
785  setTexture("");
786  setColor(Qt::white);
787  }
788 
// Destructor — signature line (listing line 792) missing; nothing to do.
793  {
794  }
795 
// Thread-safe setter for the colors currently perceived by the receptors;
// guarded by the mutex because render() reads the same vector.
802  void setPerceivedColors(const QVector<QColor>& receptors)
803  {
804  m_receptorsMutex.lock();
805  m_receptors = receptors;
806  m_receptorsMutex.unlock();
807  }
809  protected:
// Renders the camera with immediate-mode OpenGL: a small cube for the camera
// body, white boundary lines at the start and end of every receptor range,
// and one three-patch (red/green/blue components) fan per receptor showing
// its perceived color. Unlike the old version, receptors can be non-uniform,
// so both boundary lines of each range are drawn. The GL calls are strictly
// order-dependent, so the body is left untouched.
817  virtual void render(RenderWObject* renderer, QGLContext* gw)
818  {
819  // Bringing the frame of reference at the center of the camera
820  glPushMatrix();
821  renderer->container()->setupColorTexture(gw, renderer);
822  GLMultMatrix(&tm[0][0]);
823 
824  // First of all drawing the camera as a small white box. The face in the
825  // direction of view (X axis) is painted half green: the green part is the
826  // one in the direction of the upvector (Z axis)
827  glBegin(GL_QUADS);
828  const float hside = linearCameraCubeSide / 2.0;
829 
830  // front (top part)
831  glColor3f(0.0, 1.0, 0.0);
832  glNormal3f(1.0, 0.0, 0.0);
833  glVertex3f( hside, -hside, hside);
834  glVertex3f( hside, -hside, 0.0);
835  glVertex3f( hside, hside, 0.0);
836  glVertex3f( hside, hside, hside);
837 
838  // front (bottom part)
839  glColor3f(1.0, 1.0, 1.0);
840  glNormal3f(1.0, 0.0, 0.0);
841  glVertex3f( hside, -hside, 0.0);
842  glVertex3f( hside, -hside, -hside);
843  glVertex3f( hside, hside, -hside);
844  glVertex3f( hside, hside, 0.0);
845 
846  // back
847  glNormal3f(-1.0, 0.0, 0.0);
848  glVertex3f(-hside, -hside, -hside);
849  glVertex3f(-hside, -hside, hside);
850  glVertex3f(-hside, hside, hside);
851  glVertex3f(-hside, hside, -hside);
852 
853  // top
854  glNormal3f(0.0, 1.0, 0.0);
855  glVertex3f(-hside, hside, hside);
856  glVertex3f( hside, hside, hside);
857  glVertex3f( hside, hside, -hside);
858  glVertex3f(-hside, hside, -hside);
859 
860  // bottom
861  glNormal3f(0.0, -1.0, 0.0);
862  glVertex3f(-hside, -hside, -hside);
863  glVertex3f( hside, -hside, -hside);
864  glVertex3f( hside, -hside, hside);
865  glVertex3f(-hside, -hside, hside);
866 
867  // right
868  glNormal3f(0.0, 0.0, 1.0);
869  glVertex3f( hside, -hside, hside);
870  glVertex3f(-hside, -hside, hside);
871  glVertex3f(-hside, hside, hside);
872  glVertex3f( hside, hside, hside);
873 
874  // left
875  glNormal3f(0.0, 0.0, -1.0);
876  glVertex3f( hside, -hside, -hside);
877  glVertex3f(-hside, -hside, -hside);
878  glVertex3f(-hside, hside, -hside);
879  glVertex3f( hside, hside, -hside);
880 
881  glEnd();
882 
883  // Now we draw white lines to separare the various sectors of the camera
884  // Disabling lighting here (we want pure lines no matter from where we look at them)
885  glPushAttrib(GL_LIGHTING_BIT);
886  glDisable(GL_LIGHTING);
887  glLineWidth(2.5);
888  glColor3f(1.0, 1.0, 1.0);
889 
890  // Drawing the lines
891  glBegin(GL_LINES);
892  for (int i = 0; i < m_receptorsRanges.size(); i++) {
893  const wVector line1End = wVector(cos(m_receptorsRanges[i].start), sin(m_receptorsRanges[i].start), 0.0).scale(linearCameraReceptorsLength);
894  const wVector line2End = wVector(cos(m_receptorsRanges[i].end), sin(m_receptorsRanges[i].end), 0.0).scale(linearCameraReceptorsLength);
895 
896  glVertex3f(0.0, 0.0, 0.0);
897  glVertex3f(line1End.x, line1End.y, line1End.z);
898  glVertex3f(0.0, 0.0, 0.0);
899  glVertex3f(line2End.x, line2End.y, line2End.z);
900  }
901  glEnd();
902 
903  // Now drawing the state of receptors. Here we also have to lock the semaphore for
904  // the m_receptors vector
905  m_receptorsMutex.lock();
906 
907  // Drawing the status
908  glBegin(GL_QUADS);
909  glNormal3f(0.0, 1.0, 0.0);
910  const double colorPatchMinLength = linearCameraReceptorsLength / 3.0;
911  const double colorPatchMaxLength = 2.0 * linearCameraReceptorsLength / 3.0;
912  for (int i = 0; i < m_receptorsRanges.size(); i++) {
// Patch width is computed per receptor because ranges may be non-uniform
913  const double colorPatchAngle = m_receptorsRanges[i].length() / 3.0;
914  const double curAngle = m_receptorsRanges[i].start;
915 
916  for (unsigned int c = 0; c < 3; c++) {
917  const double startAngle = curAngle + double(c) * colorPatchAngle;
918  const double endAngle = curAngle + double(c + 1) * colorPatchAngle;
919 
920  // Computing the four vertexes
921  const wVector v1 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMinLength);
922  const wVector v2 = wVector(cos(startAngle), sin(startAngle), 0.0).scale(colorPatchMaxLength);
923  const wVector v3 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMaxLength);
924  const wVector v4 = wVector(cos(endAngle), sin(endAngle), 0.0).scale(colorPatchMinLength);
925 
926  // Setting the color
927  switch (c) {
928  case 0:
929  glColor3f(m_receptors[i].redF(), 0.0, 0.0);
930  break;
931  case 1:
932  glColor3f(0.0, m_receptors[i].greenF(), 0.0);
933  break;
934  case 2:
935  glColor3f(0.0, 0.0, m_receptors[i].blueF());
936  break;
937  default:
938  break;
939  }
940 
941  // Drawing the patch
942  glVertex3f(v1.x, v1.y, v1.z);
943  glVertex3f(v2.x, v2.y, v2.z);
944  glVertex3f(v3.x, v3.y, v3.z);
945  glVertex3f(v4.x, v4.y, v4.z);
946  }
947  }
948  glEnd();
949  m_receptorsMutex.unlock();
950 
951  // Restoring lighting status
952  glPopAttrib();
953 
954  glPopMatrix();
955  }
956 
// Member declarations of the (new) LinearCameraGraphic class. Several listing
// lines are missing here (e.g. the m_object declaration around lines 957-964
// and the mutex declaration around lines 972-978) — confirm against the
// repository copy.
961 
// Angular range covered by each receptor (possibly non-uniform)
965  const QVector<SimpleInterval> m_receptorsRanges;
966 
// Colors currently shown for each receptor; written by setPerceivedColors()
970  QVector<QColor> m_receptors;
971 
979  };
980  }
981 
982  using namespace __LinearCamera_internal;
983 
984  namespace {
985  // This namespace contains utility functions used in the constructor
986 
987  // Generates a list of receptors ranges from aperture and number of receptors
988  QVector<SimpleInterval> receptorsFromApertureAndNumReceptors(double aperture, unsigned int numReceptors)
989  {
990  QVector<SimpleInterval> r;
991  // Clamping aperture in the interval [0, 2pi]
992  aperture = ((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture));
993 
994  const double apertureMin = -aperture / 2.0;
995  const double receptorRange = aperture / double(numReceptors);
996 
997  for (unsigned int i = 0; i < numReceptors; i++) {
998  r.append(SimpleInterval(apertureMin + i * receptorRange, apertureMin + (i + 1) * receptorRange));
999  }
1000 
1001  return r;
1002  }
1003  }
1004 
1005  LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor) :
1007  m_receptors(numReceptors),
1008  m_object(obj),
1009  m_transformation(mtr),
1010  m_receptorsRanges(receptorsFromApertureAndNumReceptors(aperture, numReceptors)),
1011  m_maxDistance(maxDistance),
1012  m_backgroundColor(backgroundColor),
1013  m_arena(NULL),
1014  m_drawCamera(false),
1015  m_ignoreWalls(false),
1016  m_graphicalCamera(NULL)
1017 
1018  {
1019  // Stating which resources we use here
1020  addUsableResource("arena");
1021  }
1022 
// Constructs a linear camera attached to obj, displaced by mtr, whose
// receptors cover the explicitly supplied angular intervals (which need not be
// contiguous nor equally sized). Objects farther than maxDistance are not
// perceived; receptors not covered by any object read backgroundColor.
LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, QVector<SimpleInterval> receptorsRanges, double maxDistance, QColor backgroundColor) :
	m_receptors(receptorsRanges.size()),
	m_object(obj),
	m_transformation(mtr),
	m_receptorsRanges(receptorsRanges),
	m_maxDistance(maxDistance),
	m_backgroundColor(backgroundColor),
	m_arena(NULL),
	m_drawCamera(false),
	m_ignoreWalls(false),
	m_graphicalCamera(NULL)

{
	// Stating which resources we use here
	addUsableResource("arena");
}
1040 
{
	// Nothing to do here: m_graphicalCamera, when created, is attached to
	// m_object and presumably owned by the graphics subsystem — TODO confirm
}
1045 
namespace {
	// This namespace contains some structures used in the LinearCamera::update() function

	// The structure containing a color and the range of the camera field hit by this color.
	// It also contains the distance from the camera for ordering.
	struct ColorRangeAndDistance
	{
		// Default constructor: invalid color, empty angular range, zero distance
		ColorRangeAndDistance() :
			color(),
			minAngle(0.0),
			maxAngle(0.0),
			distance(0.0)
		{
		}

		// Builds an entry covering [min, max] (radians) with the given color,
		// perceived at distance d from the camera
		ColorRangeAndDistance(QColor c, double min, double max, double d) :
			color(c),
			minAngle(min),
			maxAngle(max),
			distance(d)
		{
		}

		// This is to order objects of this type (nearer objects sort first, so
		// that closer colors occlude farther ones during the update)
		bool operator<(const ColorRangeAndDistance& other) const
		{
			return (distance < other.distance);
		}

		QColor color;       // the perceived color
		double minAngle;    // start of the covered angular range (radians)
		double maxAngle;    // end of the covered angular range (radians)
		double distance;    // distance from the camera, used as the sort key
	};

	// An helper structure memorizing information about colors in a single receptor. curInterval is
	// the interval which is not covered by any color, while colorsAndFractions is the list of
	// colors and the portion of the receptor occupied by that color
	struct ColorsInReceptor
	{
		Intervals curInterval;  // the part of the receptor not yet covered by any color
		real initialLength;     // the receptor's full angular length, used to compute fractions

		// A color and the fraction of the receptor it occupies
		struct ColorAndFraction {
			ColorAndFraction() :
				color(),
				fraction(0.0)
			{
			}

			ColorAndFraction(QColor c, double f) :
				color(c),
				fraction(f)
			{
			}

			QColor color;     // the color
			double fraction;  // fraction of the receptor covered, in [0, 1]
		};
		QList<ColorAndFraction> colorsAndFractions;
	};
}
1108 
{
	// Getting the list of objects from the arena (if we have the pointer to the arena).
	// With no arena every receptor just perceives the background color.
	if (m_arena == NULL) {
		m_receptors.fill(m_backgroundColor);

		return;
	}
	const QVector<PhyObject2DWrapper*>& objectsList = m_arena->getObjects();

	// If no object is present, we can fill the receptors list with background colors and return
	if (objectsList.size() == 0) {
		m_receptors.fill(m_backgroundColor);

		return;
	}

	// Updating the matrix with the current camera position
	wMatrix currentMtr = m_transformation * m_object->matrix();

	// First of all we need to compute which color hits each receptor

	// Now filling the list with colors, ranges and distances. If an object is perceived at the
	// extremities of the aperture, it is split in two different ColorRangeAndDistance objects
	QList<ColorRangeAndDistance> colorsRangesAndDistances;

	// For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
	// this is not correct (occlusion doesn't work well) and so should be changed
	for (int i = 0; i < objectsList.size(); i++) {
		// Checking if we have to ignore a wall
		if (m_ignoreWalls && (objectsList[i]->type() == PhyObject2DWrapper::Wall)) {
			continue;
		} else if (m_object == objectsList[i]->wObject()) {
			// Skipping checks with self
			continue;
		}

		// Asking the object which angular ranges it covers (and with which colors)
		// as seen from the current camera frame
		QVector<PhyObject2DWrapper::AngularRangeAndColor> rangesAndColors;
		double distance;
		objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, rangesAndColors, distance, m_maxDistance);

		// computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
		if ((distance < 0.0) || (distance > m_maxDistance)) {
			continue;
		}

		for (QVector<PhyObject2DWrapper::AngularRangeAndColor>::const_iterator it = rangesAndColors.constBegin(); it != rangesAndColors.end(); ++it) {
			// To safely compare with the aperture, we have to convert angles between -PI_GRECO and PI_GRECO
			FARSA_DEBUG_TEST_INVALID(it->minAngle) FARSA_DEBUG_TEST_INVALID(it->maxAngle)
			const double minAngle = normalizeRad(it->minAngle);
			const double maxAngle = normalizeRad(it->maxAngle);
			const QColor color = it->color;

			// If the minAngle is greater than the maxAngle, splitting in two, so that we do not have to
			// make special cases in the subsequent part of the function.
			if (minAngle > maxAngle) {
				colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, PI_GRECO, distance));
				colorsRangesAndDistances.append(ColorRangeAndDistance(color, -PI_GRECO, maxAngle, distance));
			} else {
				colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, maxAngle, distance));
			}
		}
	}

	// Ordering colors by distance from the camera (nearest first), so that
	// nearer colors claim receptor space before farther ones
	qSort(colorsRangesAndDistances);

	// Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
	// the whole field with a valid color (infinite distance keeps it behind everything)
	colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, -PI_GRECO, PI_GRECO, std::numeric_limits<double>::infinity()));

	// The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
	// in each receptor. Before doing it we initialize the colorsInReceptors list so that the current
	// interval is equal to the receptor range
	QVector<ColorsInReceptor> colorsInReceptors(getNumReceptors());
	QVector<ColorsInReceptor>::iterator colorIt = colorsInReceptors.begin();
	QVector<SimpleInterval>::const_iterator recpIt = m_receptorsRanges.constBegin();
	for (; colorIt != colorsInReceptors.end(); ++colorIt, ++recpIt) {
		// Normalizing the receptor range between -PI_GRECO and PI_GRECO
		const double receptorMinAngle = normalizeRad(recpIt->start);
		const double receptorMaxAngle = normalizeRad(recpIt->end);

		// Checking if the receptor crosses -PI_GRECO. If so we split the interval in two
		if (receptorMinAngle > receptorMaxAngle) {
			// The receptor crosses -PI_GRECO
			colorIt->curInterval.unite(SimpleInterval(receptorMinAngle, PI_GRECO)).unite(SimpleInterval(-PI_GRECO, receptorMaxAngle));
		} else {
			colorIt->curInterval.unite(SimpleInterval(receptorMinAngle, receptorMaxAngle));
		}
		colorIt->initialLength = colorIt->curInterval.length();
	}
	// For each color (nearest first) subtract its angular range from every
	// receptor's uncovered interval; the length removed, divided by the
	// receptor's initial length, is the fraction of the receptor that color fills
	for (QList<ColorRangeAndDistance>::const_iterator colRangeIt = colorsRangesAndDistances.begin(); colRangeIt != colorsRangesAndDistances.end(); ++colRangeIt) {
		for (colorIt = colorsInReceptors.begin(); colorIt != colorsInReceptors.end(); ++colorIt) {
			const real curLength = colorIt->curInterval.length();
			colorIt->curInterval -= SimpleInterval(colRangeIt->minAngle, colRangeIt->maxAngle);
			const real newLength = colorIt->curInterval.length();
// The Italian #warning below asks to check how often curLength differs from
// newLength even when they should be equal (because of numerical errors)
#if defined(__GNUC__) && defined(DEVELOPER_WARNINGS)
	#warning PROVARE A VEDERE QUANTE VOLTE curLength È DIVERSO DA newLength ANCHE SE DOVREBBE ESSERE UGUALE (PER ERRORI NUMERICI)
#endif
			if (curLength != newLength) {
				// Clamping to 1.0 guards against numerical errors in the interval arithmetic
				const real fraction = min(1.0, (curLength - newLength) / colorIt->initialLength);
				colorIt->colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(colRangeIt->color, fraction));
			}
		}
	}

	// The final step is to compute the resulting color for each receptor. See class description for a comment
	// on this procedure (each receptor gets the fraction-weighted sum of the colors covering it)
	QVector<ColorsInReceptor>::const_iterator colorIt2 = colorsInReceptors.constBegin();
	QVector<QColor>::iterator recpActIt = m_receptors.begin();
	for (; colorIt2 != colorsInReceptors.end(); ++colorIt2, ++recpActIt) {
		double red = 0.0;
		double green = 0.0;
		double blue = 0.0;
		for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorIt2->colorsAndFractions.begin(); it != colorIt2->colorsAndFractions.end(); ++it) {
			red += it->color.redF() * it->fraction;
			green += it->color.greenF() * it->fraction;
			blue += it->color.blueF() * it->fraction;
		}
		// Clamping each channel to [0, 1] before building the final color
		*recpActIt = QColor::fromRgbF(min(1.0f, max(0.0f, red)), min(1.0f, max(0.0f, green)), min(1.0f, max(0.0f, blue)));
	}

	// Updating graphics if we have to
	if (m_drawCamera) {
		m_graphicalCamera->setPerceivedColors(m_receptors);
	}
}
1236 
1238  {
1239  if (m_drawCamera == d) {
1240  return;
1241  }
1242 
1243  m_drawCamera = d;
1244  if (m_drawCamera) {
1245  m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_receptorsRanges, "linearCamera");
1246  } else {
1247  delete m_graphicalCamera;
1248  }
1249  }
1250 
1251  void LinearCamera::resourceChanged(QString resourceName, ResourceChangeType changeType)
1252  {
1253  if (resourceName == "arena") {
1254  switch (changeType) {
1255  case Created:
1256  case Modified:
1257  m_arena = getResource<Arena>();
1258  break;
1259  case Deleted:
1260  m_arena = NULL;
1261  break;
1262  }
1263  } else {
1264  Logger::info("Unknown resource " + resourceName + " (in LinearCamera)");
1265  }
1266  }
1267 }
1268 
	m_filename(filename),
	m_numIR(0),
	m_numSamplingAngles(0),
	m_numDistances(0),
	m_initialDistance(0.0f),
	m_distanceInterval(0.0f),
	m_finalDistance(0.0f),
	m_activations(),
	m_nullActivations()
{
	// The maximum length of a line. This value is greater than needed, we use it just
	// to avoid problems with malformed files
	const int maxLineLength = 1024;

	// Opening the input file
	QFile file(m_filename);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), "Cannot open file for reading");
	}

	// Now opening a text stream on the file to read it
	QTextStream in(&file);

	// Reading the first line, the one with configuration parameters and splitting it.
	// Expected format: numIR numSamplingAngles numDistances initialDistance distanceInterval
	QStringList confs = in.readLine(maxLineLength).split(" ", QString::SkipEmptyParts);
	if (confs.size() != 5) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Wrong format for the first line, expected 5 elements, got " + QString::number(confs.size())).toLatin1().data());
	}

	// Now converting the elements of the configuration line
	bool ok;
	m_numIR = confs[0].toUInt(&ok);
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the first element of the first row: expected an unsigned integer, got \"" + confs[0] + "\"").toLatin1().data());
	}
	m_numSamplingAngles = confs[1].toUInt(&ok);
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the second element of the first row: expected an unsigned integer, got \"" + confs[1] + "\"").toLatin1().data());
	}
	m_numDistances = confs[2].toUInt(&ok);
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the third element of the first row: expected an unsigned integer, got \"" + confs[2] + "\"").toLatin1().data());
	}
	// Distances in the file are in millimeters; converting to meters
	m_initialDistance = confs[3].toFloat(&ok) / 1000.0;
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the fourth element of the first row: expected a real number, got \"" + confs[3] + "\"").toLatin1().data());
	}
	m_distanceInterval = confs[4].toFloat(&ok) / 1000.0;
	if (!ok) {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the fifth element of the first row: expected a real number, got \"" + confs[4] + "\"").toLatin1().data());
	}
	m_finalDistance = m_initialDistance + (m_numDistances - 1) * m_distanceInterval;

	// Resizing the vector of activations; m_nullActivations is the all-zero
	// reading returned when a query falls outside the sampled distances
	m_activations.resize(m_numIR * m_numSamplingAngles * m_numDistances);
	m_nullActivations.fill(0, m_numIR);

	// Now reading the blocks. I use the id after "TURN" for a safety check, the original evorobot code used that
	// in a "creative" way...
	int i = 0; // The index over the m_activations array
	for (unsigned int dist = 0; dist < m_numDistances; dist++) {
		// Each distance block starts with a "TURN <dist>" header line
		QString turnLine = in.readLine(maxLineLength);
		QStringList turnLineSplitted = turnLine.split(" ", QString::SkipEmptyParts);

		// The line we just read should have been split in two. The first element should
		// be equal to "TURN", the second one to the current dist
		if ((turnLineSplitted.size() != 2) || (turnLineSplitted[0] != "TURN") || (turnLineSplitted[1].toUInt() != dist)) {
			throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid TURN line: \"" + turnLine + "\"").toLatin1().data());
		}

		// Now reading the block for the current distance: one line per sampling angle
		for (unsigned int ang = 0; ang < m_numSamplingAngles; ang++) {
			QString activationsLine = in.readLine(maxLineLength);
			QStringList activationsLineSplitted = activationsLine.split(" ", QString::SkipEmptyParts);

			// activationsLineSplitted should have m_numIR elements, all integers between 0 and 1023
			if (activationsLineSplitted.size() != int(m_numIR)) {
				throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid activations line (wrong number of elements, expected " + QString::number(m_numIR) + ", got " + QString::number(activationsLineSplitted.size()) + "): \"" + activationsLine + "\"").toLatin1().data());
			}
			// Reading activations (raw 10-bit sensor values)
			for (unsigned int id = 0; id < m_numIR; id++) {
				bool ok;
				const unsigned int act = activationsLineSplitted[id].toUInt(&ok);
				if ((!ok) || (act > 1023)) {
					throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid activations line (invalid activation value): \"" + activationsLineSplitted[id] + "\"").toLatin1().data());
				}
				m_activations[i++] = act;
			}
		}
	}

	// The final row in the file should be "END"
	QString finalLine = in.readLine(maxLineLength);
	if (finalLine != "END") {
		throw SampleFileLoadingException(m_filename.toLatin1().data(), ("The last line in the file should be \"END\", actual value: \"" + finalLine + "\"").toLatin1().data());
	}
}
1367 
1369 {
1370  // Nothing to do here
1371 }
1372 
1373 unsigned int SampledIRDataLoader::getActivation(unsigned int i, real dist, real ang) const
1374 {
1375  // Using the other version of the getActivation function
1376  QVector<unsigned int>::const_iterator it = getActivation(dist, ang);
1377 
1378  return *(it + i);
1379 }
1380 
1381 QVector<unsigned int>::const_iterator SampledIRDataLoader::getActivation(real dist, real ang) const
1382 {
1383  const real distIndex = (dist - m_initialDistance) / m_distanceInterval;
1384  const unsigned int d = (distIndex < 0.0) ? 0 : (unsigned int) distIndex;
1385 
1386  // If we are over the maximum distance, returning all zeros
1387  if (d >= m_numDistances) {
1388  return m_nullActivations.begin();
1389  }
1390 
1391  // We first have to restrict the angle between 0.0 and 2*PI, then we can compute the index.
1392  const real normAng = normalizeRad02pi(ang);
1393  const real angIndex = (normAng / (2.0 * PI_GRECO)) * real(m_numSamplingAngles);
1394  unsigned int a = (angIndex < 0.0) ? 0 : (unsigned int) angIndex;
1395  if (a >= m_numSamplingAngles) {
1396  a = m_numSamplingAngles - 1;
1397  }
1398 
1399  return m_activations.begin() + getLinearIndex(0, a, d);
1400 }
1401 
1402 unsigned int SampledIRDataLoader::getLinearIndex(unsigned int id, unsigned int ang, unsigned int dist) const
1403 {
1404  // Inverting ang, positive angles in the file are clockwise angles
1405  ang = m_numSamplingAngles - ang - 1;
1406  return (dist * m_numSamplingAngles + ang) * m_numIR + id;
1407 }
1408 
1409 QColor getColorAtArenaGroundPosition(Arena* arena, wVector pos)
1410 {
1411  // Bringing the point on the plane
1412  pos.z = 0.0;
1413 
1414  // Taking the arena plane color by default
1415  QColor color = arena->getPlane()->color();
1416 
1417  // Now cycling through the objects in the arena
1418  const QVector<PhyObject2DWrapper*>& objectsList = arena->getObjects();
1419  foreach(PhyObject2DWrapper* obj, objectsList)
1420  {
1421  switch (obj->type())
1422  {
1423  case PhyObject2DWrapper::RectangularTargetArea:
1424  {
1425  Box2DWrapper* rectangularTargetArea = dynamic_cast<Box2DWrapper*>(obj);
1426 
1427  // Box properties
1428  const wVector center = rectangularTargetArea->centerOnPlane();
1429  const real halfWidth = rectangularTargetArea->width() / 2.0f;
1430  const real halfDepth = rectangularTargetArea->depth() / 2.0f;
1431  if ((pos.x >= center.x - halfWidth) && (pos.x <= center.x + halfWidth) &&
1432  (pos.y >= center.y - halfDepth) && (pos.y <= center.y + halfDepth)) {
1433  color = rectangularTargetArea->color();
1434  }
1435  }
1436  break;
1437  case PhyObject2DWrapper::CircularTargetArea:
1438  {
1439  Cylinder2DWrapper* circularTargetArea = dynamic_cast<Cylinder2DWrapper*>(obj);
1440 
1441  wVector areaCenterOnPlane = circularTargetArea->position();
1442  areaCenterOnPlane.z = 0.0;
1443  if ((pos - areaCenterOnPlane).norm() <= circularTargetArea->radius()) {
1444  color = circularTargetArea->color();
1445  }
1446  }
1447  break;
1448  default:
1449  break;
1450  }
1451  }
1452 
1453  return color;
1454 }
1455 
1456 } // end namespace farsa
void usableResources(QStringList resources)
const QString m_additionalInputsResource
The name of the resource associated with the vector of additional inputs.
Definition: sensors.h:145
FakeSensor(ConfigurationParameters &params, QString prefix)
Constructor.
Definition: sensors.cpp:42
FARSA_UTIL_TEMPLATE float linearMap(float x, float min=-10, float max=10, float outMin=-1, float outMax=1)
void drawCamera(bool d)
Sets whether to draw the linear camera or not.
Definition: sensors.cpp:1237
const double m_receptorRange
The range of each receptor.
Definition: sensors.cpp:471
virtual void render(RenderWObject *renderer, QGLContext *gw)
Performs the actual drawing.
Definition: sensors.cpp:817
LinearCameraGraphic(WObject *object, const wMatrix &transformation, QVector< SimpleInterval > receptorsRanges, QString name="unamed")
Constructor.
Definition: sensors.cpp:773
static QVector< double > getVector(ConfigurationParameters &params, QString paramPath, QString def=QString())
FARSA_UTIL_TEMPLATE real normalizeRad(real x)
void setUseColorTextureOfOwner(bool b)
static QString getString(ConfigurationParameters &params, QString paramPath, QString def=QString())
void declareResource(QString name, T *resource, QString lockBuddy="")
void addUsableResource(QString resource)
World * world()
wVector bbMax
maximum 3D point for linearization
Definition: sensors.h:192
QVector< QColor > m_receptors
The vector with perceived colors.
Definition: sensors.cpp:476
const QVector< SimpleInterval > m_receptorsRanges
The list of receptors.
Definition: sensors.cpp:965
QString name()
Return the name of the Sensor.
virtual void save(ConfigurationParameters &params, QString prefix)
Saves the parameters of the FakeSensor into the provided ConfigurationParameters object.
Definition: sensors.cpp:66
FARSA_UTIL_TEMPLATE real normalizeRad02pi(real x)
void attachToObject(WObject *object, bool makeOwner=false, const wMatrix &displacement=wMatrix::identity())
virtual bool nextNeuron()=0
Go to the next neuron of the current block.
~FakeSensor()
Destructor.
Definition: sensors.cpp:56
QMutex m_receptorsMutex
The mutex protecting the m_receptors variable.
Definition: sensors.cpp:484
void setTexture(QString textureName)
virtual bool setCurrentBlock(QString blockName)=0
Set the current blocks of neurons to iterate.
FARSA_UTIL_TEMPLATE const T max(const T &t1, const U &t2)
ObjectPositionSensor(ConfigurationParameters &params, QString prefix)
Constructor and Configure.
Definition: sensors.cpp:133
void update()
Updates the sensor reading.
Definition: sensors.cpp:1109
void update()
Update the state of the Sensor every time step.
Definition: sensors.cpp:167
wVector bbMin
minimum 3D point for linearization
Definition: sensors.h:190
The base abstract class for iterating over neurons of a neural network.
virtual void shareResourcesWith(ResourcesUser *buddy)
~ObjectPositionSensor()
Destructor.
Definition: sensors.cpp:154
void setColor(QColor c)
static void describe(QString type)
Describe all the parameter for configuring the Sensor.
static void describe(QString type)
Describes all the parameter needed to configure this class.
Definition: sensors.cpp:75
QString objectName
the object resource name
Definition: sensors.h:186
virtual void render(RenderWObject *renderer, QGLContext *gw)
Performs the actual drawing.
Definition: sensors.cpp:307
const double m_maxAngle
The maximum angle of the camera.
Definition: sensors.cpp:459
QVector< QColor > m_receptors
The vector with perceived colors.
Definition: sensors.cpp:970
const wMatrix & matrix() const
const QVector< PhyObject2DWrapper * > & getObjects() const
Returns the list of 2D objects.
Definition: arena.h:136
The base abstract class for the Sensor hierarchy.
void resourceChanged(QString resourceName, ResourceChangeType changeType)
The function called when a resource used here is changed.
Definition: sensors.cpp:192
int size()
Return the number of neurons on which the Sensor will set the input: 3.
Definition: sensors.cpp:188
The exception thrown when an error occurs during IR sample files loading.
Definition: sensors.h:693
static void info(QString msg)
const unsigned int m_numReceptors
The number of receptors.
Definition: sensors.cpp:464
The graphical representation of the linear camera.
Definition: sensors.cpp:757
virtual ~LinearCamera()
Destructor.
Definition: sensors.cpp:1041
void resetNeededResourcesCheck()
Resets the check on needed resources so that the next call to checkAllNeededResourcesExist() will per...
bool startObjectParameters(QString groupPath, QString typeName, ParameterSettable *object)
void setPerceivedColors(const QVector< QColor > &receptors)
Sets the colors perceived by the camera.
Definition: sensors.cpp:292
LinearCameraGraphic(WObject *object, const wMatrix &transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name="unamed")
Constructor.
Definition: sensors.cpp:261
virtual int size()
Returns the number of inputs.
Definition: sensors.cpp:99
ResourceVector< real > m_additionalInputs
The vector with additional inputs.
Definition: sensors.h:133
void drawCamera(bool d)
Sets whether to draw the linear camera or not.
Definition: sensors.cpp:704
static Descriptor addTypeDescription(QString type, QString shortHelp, QString longHelp=QString(""))
unsigned int getActivation(unsigned int i, real dist, real ang) const
Returns the activation of the given sensor at the given distance and angle.
Definition: sensors.cpp:1373
void update()
Updates the sensor reading.
Definition: sensors.cpp:579
float real
RenderWObjectContainer * container()
void setPerceivedColors(const QVector< QColor > &receptors)
Sets the colors perceived by the camera.
Definition: sensors.cpp:802
virtual void setInput(double value)=0
Set the input of the current neuron.
LinearCamera(WObject *obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor)
Constructor.
Definition: sensors.cpp:1005
WObject *const m_object
The object to which we are attached.
Definition: sensors.cpp:960
unsigned int size() const
virtual ~LinearCamera()
Destructor.
Definition: sensors.cpp:512
const QString m_neuronsIteratorResource
The name of the resource associated with the neural network iterator.
Definition: sensors.h:139
void save(ConfigurationParameters &params, QString prefix)
Save the parameters of the ObjectPositionSensor into the ConfigurationParameters. ...
Definition: sensors.cpp:211
virtual void update()
Updates the state of the Sensor every time step.
Definition: sensors.cpp:85
QString name() const
FARSA_UTIL_TEMPLATE const T min(const T &t1, const U &t2)
const double m_minAngle
The minimum angle of the camera.
Definition: sensors.cpp:454
NeuronsIterator * m_neuronsIterator
The object to iterate over neurons of the neural network.
Definition: sensors.h:150
unsigned int getNumReceptors() const
Returns the number of receptors.
Definition: sensors.h:554
virtual void setGraphicProperties(QString label, double minValue, double maxValue, QColor color)=0
Set the graphic properties for the current neuron (in case it will be visualized on a GUI) ...
void deleteResource(QString name)
QString neuronsIteratorResource
The name of the resource associated with the neural network iterator.
Definition: sensors.h:184
LinearCamera(WObject *obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor)
Constructor.
Definition: sensors.cpp:490
void save(ConfigurationParameters &params, QString prefix)
Save the parameters into the ConfigurationParameters.
void checkAllNeededResourcesExist()
Checks whether all resources we need are existing and throws an exception if they aren't...
WObject *const m_object
The object to which we are attached.
Definition: sensors.cpp:449
bool linearize
if true will use bbMin and bbMax to linearize the position into [0,1]
Definition: sensors.h:188
virtual void resourceChanged(QString resourceName, ResourceChangeType changeType)
The function called when a resource used here is changed.
Definition: sensors.cpp:113
QMutex m_receptorsMutex
The mutex protecting the m_receptors variable.
Definition: sensors.cpp:978
The graphical representation of the linear camera.
Definition: sensors.cpp:245
void createParameter(QString groupPath, QString parameter)
static void warning(QString msg)
static void describe(QString type)
Describe all the parameter for configuring the iCubArmJointsSensor.
Definition: sensors.cpp:158
SampledIRDataLoader(QString filename)
Constructor.
Definition: sensors.cpp:1269
void setupColorTexture(QGLContext *, RenderWObject *obj)
virtual void shareResourcesWith(ResourcesUser *other)
The function to share resources.
Definition: sensors.cpp:104
~SampledIRDataLoader()
Destructor.
Definition: sensors.cpp:1368