sensors.cpp
45 m_neuronsIteratorResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator"))),
46 m_additionalInputsResource(actualResourceNameForMultirobot(ConfigurationHelper::getString(params, prefix + "additionalInputsResource", "additionalInputs"))),
79 Descriptor d = addTypeDescription(type, "Adds input neurons that can be used for custom operations", "With this sensor you can specify how many additional inputs are needed in the controller. This also declares a resource that can be used to access the additional inputs");
80 d.describeInt("additionalInputs").def(1).limits(1,100).props(IsMandatory).help("The number of additional inputs that will be added to the controller (default 1)");
81 d.describeString("neuronsIterator").def("neuronsIterator").help("The name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
82 d.describeString("additionalInputsResource").def("additionalInputs").help("The name of the resource associated with the vector of additional inputs (default is \"additionalInputs\")");
135 neuronsIteratorResource = ConfigurationHelper::getString(params, prefix + "neuronsIterator", "neuronsIterator");
146 Logger::warning( QString("ObjectPositionSensor %1 - bbMin and/or bbMax parameters are not well specified; they will be ignored").arg(name()) );
160 Descriptor d = addTypeDescription( type, "Sensor for reading the three absolute coordinates (position in the world frame) of an object" );
161 d.describeString("neuronsIterator").def("neuronsIterator").help("the name of the resource associated with the neural network iterator (default is \"neuronsIterator\")");
162 d.describeString( "object" ).def( "object" ).props( IsMandatory ).help( "The name of the resource associated with the object to track with this sensor" );
163 d.describeReal( "bbMin" ).props( IsList ).help( "The minimum 3D point used to linearize the object position into [0,1]" );
164 d.describeReal( "bbMax" ).props( IsList ).help( "The maximum 3D point used to linearize the object position into [0,1]" );
192 void ObjectPositionSensor::resourceChanged(QString resourceName, ResourceChangeType changeType) {
218 params.createParameter( prefix, "bbMin", QString("%1 %2 %3").arg(bbMin[0]).arg(bbMin[1]).arg(bbMin[2]) );
219 params.createParameter( prefix, "bbMax", QString("%1 %2 %3").arg(bbMax[0]).arg(bbMax[1]).arg(bbMax[2]) );
261 LinearCameraGraphic(WObject *object, const wMatrix& transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name = "unamed") :
385 const wVector lineEnd = wVector(cos(curAngle), sin(curAngle), 0.0).scale(linearCameraReceptorsLength);
490 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor) :
495 m_aperture((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture)),
552 // A helper structure memorizing information about colors in a single receptor. minAngle and maxAngle
553 // are used to store the current portion of the receptor for which we already know the color, while
554 // colorsAndFractions is the list of colors and the portion of the receptor occupied by that color
605 // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
618 objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, rangesAndColors, distance, m_maxDistance);
620 // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
626 // To safely compare with the aperture, we have to convert angles between -PI_GRECO and PI_GRECO
638 colorsRangesAndDistances.append(ColorRangeAndDistance(color, minAngle, m_apertureMax, distance));
641 colorsRangesAndDistances.append(ColorRangeAndDistance(color, m_apertureMin, maxAngle, distance));
644 if (((minAngle > m_apertureMin) && (minAngle < m_apertureMax)) || ((maxAngle > m_apertureMin) && (maxAngle < m_apertureMax))) {
645 colorsRangesAndDistances.append(ColorRangeAndDistance(color, max(minAngle, m_apertureMin), min(maxAngle, m_apertureMax), distance));
654 // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
656 colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, m_apertureMin, m_apertureMax, std::numeric_limits<double>::infinity()));
658 // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
661 for (QList<ColorRangeAndDistance>::const_iterator it = colorsRangesAndDistances.begin(); it != colorsRangesAndDistances.end(); ++it) {
664 const int maxIndex = min(double(m_numReceptors - 1), floor((it->maxAngle - m_apertureMin) / m_receptorRange));
680 colorsInReceptors[i].colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(it->color, fraction));
684 // The final step is to compute the resulting color for each receptor. See class description for a comment
690 for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorsInReceptors[i].colorsAndFractions.begin(); it != colorsInReceptors[i].colorsAndFractions.end(); ++it) {
695 m_receptors[i] = QColor::fromRgbF(min(1.0f, max(0.0f, red)), min(1.0f, max(0.0f, green)), min(1.0f, max(0.0f, blue)));
712 m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_apertureMin, m_apertureMax, m_numReceptors, "linearCamera");
773 LinearCameraGraphic(WObject *object, const wMatrix& transformation, QVector<SimpleInterval> receptorsRanges, QString name = "unamed") :
893 const wVector line1End = wVector(cos(m_receptorsRanges[i].start), sin(m_receptorsRanges[i].start), 0.0).scale(linearCameraReceptorsLength);
894 const wVector line2End = wVector(cos(m_receptorsRanges[i].end), sin(m_receptorsRanges[i].end), 0.0).scale(linearCameraReceptorsLength);
988 QVector<SimpleInterval> receptorsFromApertureAndNumReceptors(double aperture, unsigned int numReceptors)
992 aperture = ((aperture > (2.0 * PI_GRECO)) ? (2.0 * PI_GRECO) : ((aperture < 0.0) ? 0.0 : aperture));
998 r.append(SimpleInterval(apertureMin + i * receptorRange, apertureMin + (i + 1) * receptorRange));
1005 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor) :
1023 LinearCamera::LinearCamera(WObject* obj, wMatrix mtr, QVector<SimpleInterval> receptorsRanges, double maxDistance, QColor backgroundColor) :
1081 // A helper structure memorizing information about colors in a single receptor. curInterval is
1135 // For the moment we use the distance to order objects (see ColorRangeAndDistance::operator<), however
1148 objectsList[i]->computeLinearViewFieldOccupiedRange(currentMtr, rangesAndColors, distance, m_maxDistance);
1150 // computeLinearViewFieldOccupiedRange returns a negative distance if the object is outside the view field
1155 for (QVector<PhyObject2DWrapper::AngularRangeAndColor>::const_iterator it = rangesAndColors.constBegin(); it != rangesAndColors.end(); ++it) {
1156 // To safely compare with the aperture, we have to convert angles between -PI_GRECO and PI_GRECO
1176 // Now we can add the background color at the end of the list. It covers all receptors to be sure to fill
1178 colorsRangesAndDistances.append(ColorRangeAndDistance(m_backgroundColor, -PI_GRECO, PI_GRECO, std::numeric_limits<double>::infinity()));
1180 // The next step is to calculate the percentage of each color in the colorsRangesAndDistances list
1181 // in each receptor. Before doing it we initialize the colorsInReceptors list so that the current
1194 colorIt->curInterval.unite(SimpleInterval(receptorMinAngle, PI_GRECO)).unite(SimpleInterval(-PI_GRECO, receptorMaxAngle));
1200 for (QList<ColorRangeAndDistance>::const_iterator colRangeIt = colorsRangesAndDistances.begin(); colRangeIt != colorsRangesAndDistances.end(); ++colRangeIt) {
1206 #warning PROVARE A VEDERE QUANTE VOLTE curLength È DIVERSO DA newLength ANCHE SE DOVREBBE ESSERE UGUALE (PER ERRORI NUMERICI)
1210 colorIt->colorsAndFractions.append(ColorsInReceptor::ColorAndFraction(colRangeIt->color, fraction));
1215 // The final step is to compute the resulting color for each receptor. See class description for a comment
1223 for (QList<ColorsInReceptor::ColorAndFraction>::const_iterator it = colorIt2->colorsAndFractions.begin(); it != colorIt2->colorsAndFractions.end(); ++it) {
1228 *recpActIt = QColor::fromRgbF(min(1.0f, max(0.0f, red)), min(1.0f, max(0.0f, green)), min(1.0f, max(0.0f, blue)));
1245 m_graphicalCamera = new LinearCameraGraphic(m_object, m_transformation, m_receptorsRanges, "linearCamera");
1287 throw SampleFileLoadingException(m_filename.toLatin1().data(), "Cannot open file for reading");
1296 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Wrong format for the first line, expected 5 elements, got " + QString::number(confs.size())).toLatin1().data());
1303 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the first element of the first row: expected an unsigned integer, got \"" + confs[0] + "\"").toLatin1().data());
1307 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the second element of the first row: expected an unsigned integer, got \"" + confs[1] + "\"").toLatin1().data());
1311 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the third element of the first row: expected an unsigned integer, got \"" + confs[2] + "\"").toLatin1().data());
1315 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the fourth element of the first row: expected a real number, got \"" + confs[3] + "\"").toLatin1().data());
1319 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Error reading the fifth element of the first row: expected a real number, got \"" + confs[4] + "\"").toLatin1().data());
1327 // Now reading the blocks. I use the id after "TURN" for a safety check, the original evorobot code used that
1336 if ((turnLineSplitted.size() != 2) || (turnLineSplitted[0] != "TURN") || (turnLineSplitted[1].toUInt() != dist)) {
1337 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid TURN line: \"" + turnLine + "\"").toLatin1().data());
1347 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid activations line (wrong number of elements, expected " + QString::number(m_numIR) + ", got " + QString::number(activationsLineSplitted.size()) + "): \"" + activationsLine + "\"").toLatin1().data());
1354 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("Invalid activations line (invalid activation value): \"" + activationsLineSplitted[id] + "\"").toLatin1().data());
1364 throw SampleFileLoadingException(m_filename.toLatin1().data(), ("The last line in the file should be \"END\", actual value: \"" + finalLine + "\"").toLatin1().data());
1381 QVector<unsigned int>::const_iterator SampledIRDataLoader::getActivation(real dist, real ang) const
1402 unsigned int SampledIRDataLoader::getLinearIndex(unsigned int id, unsigned int ang, unsigned int dist) const
void usableResources(QStringList resources)
const QString m_additionalInputsResource
The name of the resource associated with the vector of additional inputs.
Definition: sensors.h:145
FARSA_UTIL_TEMPLATE float linearMap(float x, float min=-10, float max=10, float outMin=-1, float outMax=1)
virtual void render(RenderWObject *renderer, QGLContext *gw)
Performs the actual drawing.
Definition: sensors.cpp:817
wMatrix tm
LinearCameraGraphic(WObject *object, const wMatrix &transformation, QVector< SimpleInterval > receptorsRanges, QString name="unamed")
Constructor.
Definition: sensors.cpp:773
static QVector< double > getVector(ConfigurationParameters &params, QString paramPath, QString def=QString())
FARSA_UTIL_TEMPLATE real normalizeRad(real x)
void setUseColorTextureOfOwner(bool b)
static QString getString(ConfigurationParameters ¶ms, QString paramPath, QString def=QString())
void declareResource(QString name, T *resource, QString lockBuddy="")
void addUsableResource(QString resource)
World * world()
virtual void save(ConfigurationParameters &params, QString prefix)
Saves the parameters of the FakeSensor into the provided ConfigurationParameters object.
Definition: sensors.cpp:66
FARSA_UTIL_TEMPLATE real normalizeRad02pi(real x)
void attachToObject(WObject *object, bool makeOwner=false, const wMatrix &displacement=wMatrix::identity())
virtual bool nextNeuron()=0
Go to the next neuron of the current block.
void setTexture(QString textureName)
virtual bool setCurrentBlock(QString blockName)=0
Set the current blocks of neurons to iterate.
FARSA_UTIL_TEMPLATE const T max(const T &t1, const U &t2)
ObjectPositionSensor(ConfigurationParameters ¶ms, QString prefix)
Constructor and Configure.
Definition: sensors.cpp:133
The base abstract class for iterating over neurons of a neural network.
Definition: neuroninterfaces.h:56
virtual void shareResourcesWith(ResourcesUser *buddy)
void setColor(QColor c)
static void describe(QString type)
Describe all the parameter for configuring the Sensor.
Definition: neuroninterfaces.cpp:148
static void describe(QString type)
Describes all the parameter needed to configure this class.
Definition: sensors.cpp:75
virtual void render(RenderWObject *renderer, QGLContext *gw)
Performs the actual drawing.
Definition: sensors.cpp:307
const wMatrix & matrix() const
const QVector< PhyObject2DWrapper * > & getObjects() const
Returns the list of 2D objects.
Definition: arena.h:136
void resourceChanged(QString resourceName, ResourceChangeType changeType)
The function called when a resource used here is changed.
Definition: sensors.cpp:192
int size()
Return the number of neurons on which the Sensor will set the input: 3.
Definition: sensors.cpp:188
The exception thrown when an error occurs during IR sample files loading.
Definition: sensors.h:693
static void info(QString msg)
The graphical representation of the linear camera.
Definition: sensors.cpp:757
void resetNeededResourcesCheck()
Resets the check on needed resources so that the next call to checkAllNeededResourcesExist() will per...
Definition: neuroninterfaces.cpp:180
bool startObjectParameters(QString groupPath, QString typeName, ParameterSettable *object)
void setPerceivedColors(const QVector< QColor > &receptors)
Sets the colors perceived by the camera.
Definition: sensors.cpp:292
LinearCameraGraphic(WObject *object, const wMatrix &transformation, double minAngle, double maxAngle, unsigned int numReceptors, QString name="unamed")
Constructor.
Definition: sensors.cpp:261
ResourceVector< real > m_additionalInputs
The vector with additional inputs.
Definition: sensors.h:133
static Descriptor addTypeDescription(QString type, QString shortHelp, QString longHelp=QString(""))
unsigned int getActivation(unsigned int i, real dist, real ang) const
Returns the activation of the given sensor at the given distance and angle.
Definition: sensors.cpp:1373
float real
RenderWObjectContainer * container()
void setPerceivedColors(const QVector< QColor > &receptors)
Sets the colors perceived by the camera.
Definition: sensors.cpp:802
virtual void setInput(double value)=0
Set the input of the current neuron.
LinearCamera(WObject *obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor)
Constructor.
Definition: sensors.cpp:1005
unsigned int size() const
const QString m_neuronsIteratorResource
The name of the resource associated with the neural network iterator.
Definition: sensors.h:139
void save(ConfigurationParameters &params, QString prefix)
Save the parameters of the ObjectPositionSensor into the ConfigurationParameters. ...
Definition: sensors.cpp:211
QString name() const
FARSA_UTIL_TEMPLATE const T min(const T &t1, const U &t2)
NeuronsIterator * m_neuronsIterator
The object to iterate over neurons of the neural network.
Definition: sensors.h:150
virtual void setGraphicProperties(QString label, double minValue, double maxValue, QColor color)=0
Set the graphic properties for the current neuron (in case it will be visualized on a GUI) ...
void deleteResource(QString name)
QString neuronsIteratorResource
The name of the resource associated with the neural network iterator.
Definition: sensors.h:184
LinearCamera(WObject *obj, wMatrix mtr, double aperture, unsigned int numReceptors, double maxDistance, QColor backgroundColor)
Constructor.
Definition: sensors.cpp:490
void save(ConfigurationParameters &params, QString prefix)
Save the parameters into the ConfigurationParameters.
Definition: neuroninterfaces.cpp:134
void checkAllNeededResourcesExist()
Checks whether all resources we need are existing and throws an exception if they aren't...
Definition: neuroninterfaces.cpp:165
bool linearize
if true will use bbMin and bbMax to linearize the position into [0,1]
Definition: sensors.h:188
virtual void resourceChanged(QString resourceName, ResourceChangeType changeType)
The function called when a resource used here is changed.
Definition: sensors.cpp:113
The graphical representation of the linear camera.
Definition: sensors.cpp:245
void createParameter(QString groupPath, QString parameter)
static void warning(QString msg)
static void describe(QString type)
Describe all the parameter for configuring the iCubArmJointsSensor.
Definition: sensors.cpp:158
IsMandatory
void setupColorTexture(QGLContext *, RenderWObject *obj)
virtual void shareResourcesWith(ResourcesUser *other)
The function to share resources.
Definition: sensors.cpp:104
ResourceChangeType